diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 655ffe289e..e6badf936a 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -1,6 +1,6 @@
 name: Bug Report
 description: File a bug report
-title: 'Bug: '
+title: 'Your issue title here'
 labels:
   - 'type: bug'
 body:
diff --git a/.github/ISSUE_TEMPLATE/enhancement_request.yml b/.github/ISSUE_TEMPLATE/enhancement_request.yml
index 52b49e0481..31b2eb2edd 100644
--- a/.github/ISSUE_TEMPLATE/enhancement_request.yml
+++ b/.github/ISSUE_TEMPLATE/enhancement_request.yml
@@ -1,6 +1,6 @@
 name: Enhancement Request
 description: Create a report to help us enhance a particular feature
-title: "Enhancement: "
+title: "Your issue title here"
 labels:
   - "type: enhancement"
 body:
@@ -49,4 +49,4 @@ body:
       label: "Additional context:"
       description: Add any other context or screenshots about the enhancement request here.
     validations:
-      required: false
\ No newline at end of file
+      required: false
diff --git a/.github/workflows/pr_linting.yml b/.github/workflows/pr_linting.yml
new file mode 100644
index 0000000000..3d2431b69a
--- /dev/null
+++ b/.github/workflows/pr_linting.yml
@@ -0,0 +1,24 @@
+name: 📇 Code Linting
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+  pull-requests: write
+
+jobs:
+  linting:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
diff --git a/.gitignore b/.gitignore
index 502cf85b9f..acbc3e2572 100644
--- a/.gitignore
+++ b/.gitignore
@@ -77,6 +77,7 @@ dump.sql
 
 # Poetry
 ########
+.poetry/
 .python-version
 .editorconfig
 .pre-commit-config.yaml
diff --git a/.hound.yml b/.hound.yml
index df9cdab64a..de5adb3154 100644
--- a/.hound.yml
+++ b/.hound.yml
@@ -1,3 +1,3 @@
-flake8:
-  enabled: true
-  config_file: setup.cfg
+flake8:
+  enabled: true
+  config_file: setup.cfg
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index eec388924e..8aa3e1b81b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,12 +1,27 @@
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
-  hooks:
-  - id: trailing-whitespace
-  - id: end-of-file-fixer
-  - id: check-yaml
-  - id: check-added-large-files
-  - id: no-commit-to-branch
-    args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files
+      - id: no-commit-to-branch
+        args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.2.6
+    hooks:
+      - id: codespell
+        additional_dependencies:
+          - tomli
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.3.3
+    hooks:
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      # - id: ruff-format
diff --git a/README.md b/README.md
index e2aa98bb16..11a660e643 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
-AYON Core addon
-========
+AYON Core Addon
+===============
 
-AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons. 
+AYON core provides the base building blocks for all other AYON addons and integrations and is responsible for discovery and initialization of other addons.
 
 Some of its key functions include:
 - It is used as the main command line handler in [ayon-launcher](https://github.com/ynput/ayon-launcher) application.
@@ -13,8 +13,20 @@ AYON core provides the base building blocks for all other AYON addons and integr
 - Defines pipeline API used by other integrations
 - Provides all graphical tools for artists
 - Defines AYON QT styling
-- A bunch more things 
+- A bunch more things
 
-Together with [ayon-launcher](https://github.com/ynput/ayon-launcher) , they form the base of AYON pipeline and is one of few compulsory addons for AYON pipeline to be useful in a meaningful way. 
+Together with [ayon-launcher](https://github.com/ynput/ayon-launcher), it forms the base of the AYON pipeline, and it is one of the few compulsory addons needed for the AYON pipeline to be useful in a meaningful way.
 
-AYON-core is a successor to OpenPype repository (minus all the addons) and still in the process of cleaning up of all references. Please bear with us during this transitional phase.
+AYON-core is a successor to the [OpenPype repository](https://github.com/ynput/OpenPype) (minus all the addons) and is still in the process of cleaning up all references. Please bear with us during this transitional phase.
+
+Development and testing notes
+-----------------------------
+There is a `pyproject.toml` file in the root of the repository. This file defines the development environment and is used by `poetry` to create a virtual environment.
+This virtual environment is used to run tests and to develop the code, and it helps with
+linting and formatting. Dependencies defined here are not used in actual addon
+deployment - for that you need to edit the `./client/pyproject.toml` file. That file
+will then be processed by [ayon-dependencies-tool](https://github.com/ynput/ayon-dependencies-tool)
+to create the dependency package.
+
+Right now, this file needs to be synced with the dependencies manually, but in the future
+we plan to automate the process of development environment creation.
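Until that automation exists, the manual sync described in the README note above can at least be spot-checked with a small script. The following is a minimal sketch and not part of this changeset; it assumes Python 3.11+ (for the standard-library `tomllib`) and that both files declare their dependencies under the usual poetry `[tool.poetry.dependencies]` table:

```python
# Minimal sync check between the two pyproject.toml files mentioned above.
# Assumes Python 3.11+ (tomllib) and the standard poetry dependency table
# layout in both files; run it from the repository root.
import tomllib


def dependency_names(path):
    """Return the set of dependency names declared in a pyproject.toml."""
    with open(path, "rb") as stream:
        data = tomllib.load(stream)
    deps = data.get("tool", {}).get("poetry", {}).get("dependencies", {})
    # 'python' pins the interpreter version, it is not a package dependency
    return set(deps) - {"python"}


root_deps = dependency_names("pyproject.toml")
client_deps = dependency_names("client/pyproject.toml")
print("Only in development environment:", sorted(root_deps - client_deps))
print("Only in addon deployment:", sorted(client_deps - root_deps))
```

Names printed on only one side are candidates for a manual sync; intentional differences, such as dev-only tooling in the root file, are of course expected.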
diff --git a/client/ayon_core/__init__.py b/client/ayon_core/__init__.py
index 5f9eb6cea3..ce5a28601c 100644
--- a/client/ayon_core/__init__.py
+++ b/client/ayon_core/__init__.py
@@ -1,12 +1,28 @@
 import os
 
+from .version import __version__
+
 AYON_CORE_ROOT = os.path.dirname(os.path.abspath(__file__))
 
-# TODO remove after '1.x.x'
+# -------------------------
+# DEPRECATED - Remove before '1.x.x' release
+# -------------------------
 PACKAGE_DIR = AYON_CORE_ROOT
 PLUGINS_DIR = os.path.join(AYON_CORE_ROOT, "plugins")
 
 AYON_SERVER_ENABLED = True
 # Indicate if AYON entities should be used instead of OpenPype entities
-USE_AYON_ENTITIES = False
+USE_AYON_ENTITIES = True
+# -------------------------
+
+
+__all__ = (
+    "__version__",
+
+    # Deprecated
+    "AYON_CORE_ROOT",
+    "PACKAGE_DIR",
+    "PLUGINS_DIR",
+    "AYON_SERVER_ENABLED",
+    "USE_AYON_ENTITIES",
+)
diff --git a/client/ayon_core/addon/README.md b/client/ayon_core/addon/README.md
index a15e8bdc69..88c27db154 100644
--- a/client/ayon_core/addon/README.md
+++ b/client/ayon_core/addon/README.md
@@ -27,7 +27,7 @@ AYON addons should contain separated logic of specific kind of implementation, s
 - default interfaces are defined in `interfaces.py`
 
 ## IPluginPaths
-- addon wants to add directory path/s to avalon or publish plugins
+- addon wants to add directory path/s to publish, load, create or inventory plugins
 - addon must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"`
 - each key may contain list or string with a path to directory with plugins
 
@@ -89,4 +89,4 @@ AYON addons should contain separated logic of specific kind of implementation, s
 ### TrayAddonsManager
 - inherits from `AddonsManager`
-- has specific implementation for Pype Tray tool and handle `ITrayAddon` methods
+- has specific implementation for AYON Tray and handles `ITrayAddon` methods
diff --git a/client/ayon_core/addon/base.py b/client/ayon_core/addon/base.py
index f0763649ca..21b1193b07 100644
--- a/client/ayon_core/addon/base.py
+++ b/client/ayon_core/addon/base.py
@@ -14,9 +14,11 @@ from abc import ABCMeta, abstractmethod
 
 import six
 import appdirs
+import ayon_api
+from semver import VersionInfo
 
+from ayon_core import AYON_CORE_ROOT
 from ayon_core.lib import Logger, is_dev_mode_enabled
-from ayon_core.client import get_ayon_server_api_connection
 from ayon_core.settings import get_studio_settings
 
 from .interfaces import (
@@ -45,6 +47,11 @@
 IGNORED_HOSTS_IN_AYON = {
 }
 
 IGNORED_MODULES_IN_AYON = set()
+# Milestone versions of addons that were moved out of the ayon-core codebase
+# - used to log a warning about the missing or outdated addon
+MOVED_ADDON_MILESTONE_VERSIONS = {
+    "applications": VersionInfo(0, 2, 0),
+}
 
 
 # Inherit from `object` for Python 2 hosts
 class _ModuleClass(object):
@@ -147,8 +154,7 @@
 def _get_ayon_bundle_data():
-    con = get_ayon_server_api_connection()
-    bundles = con.get_bundles()["bundles"]
+    bundles = ayon_api.get_bundles()["bundles"]
 
     bundle_name = os.getenv("AYON_BUNDLE_NAME")
 
@@ -176,8 +182,7 @@
     output = []
     bundle_addons = bundle_info["addons"]
-    con = get_ayon_server_api_connection()
-    addons = con.get_addons_info()["addons"]
+    addons = ayon_api.get_addons_info()["addons"]
     for addon in addons:
         name = addon["name"]
         versions = addon.get("versions")
@@ -193,6 +198,45 @@
     return output
 
 
+def _handle_moved_addons(addon_name, milestone_version, log):
+    """Log message that addon version is not compatible with current core.
+
+    The function can return a path to the addon client code, but only when
+    ayon-core is used from code (for development), and even then it logs
+    a warning.
+
+    Args:
+        addon_name (str): Addon name.
+        milestone_version (str): Milestone addon version.
+        log (logging.Logger): Logger object.
+
+    Returns:
+        Union[str, None]: Addon dir or None.
+    """
+    # Handle addons which were moved out of ayon-core
+    # - Try to fix it by loading it directly from server addons dir in
+    #   ayon-core repository. But that will work only if ayon-core is
+    #   used from code.
+    addon_dir = os.path.join(
+        os.path.dirname(os.path.dirname(AYON_CORE_ROOT)),
+        "server_addon",
+        addon_name,
+        "client",
+    )
+    if not os.path.exists(addon_dir):
+        log.error((
+            "Addon '{}' is not available."
+            " Please update applications addon to '{}' or higher."
+        ).format(addon_name, milestone_version))
+        return None
+
+    log.warning((
+        "Please update '{}' addon to '{}' or higher."
+        " Using client code from ayon-core repository."
+    ).format(addon_name, milestone_version))
+    return addon_dir
+
+
 def _load_ayon_addons(openpype_modules, modules_key, log):
     """Load AYON addons based on information from server.
 
@@ -250,6 +294,7 @@
         use_dev_path = dev_addon_info.get("enabled", False)
 
         addon_dir = None
+        milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
         if use_dev_path:
             addon_dir = dev_addon_info["path"]
             if not addon_dir or not os.path.exists(addon_dir):
                 (
                 ).format(addon_name, addon_version, addon_dir))
                 continue
 
+        elif (
+            milestone_version is not None
+            and VersionInfo.parse(addon_version) < milestone_version
+        ):
+            addon_dir = _handle_moved_addons(
+                addon_name, milestone_version, log
+            )
+            if not addon_dir:
+                continue
+
         elif addons_dir_exists:
             folder_name = "{}_{}".format(addon_name, addon_version)
             addon_dir = os.path.join(addons_dir, folder_name)
@@ -342,9 +397,8 @@
 ):
     # Add current directory at first place
     # - has small differences in import logic
-    current_dir = os.path.abspath(os.path.dirname(__file__))
-    hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts")
-    modules_dir = os.path.join(os.path.dirname(current_dir), "modules")
+    hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")
+    modules_dir = os.path.join(AYON_CORE_ROOT, "modules")
 
     ignored_host_names = set(IGNORED_HOSTS_IN_AYON)
     ignored_module_dir_filenames = (
@@ -743,7 +797,7 @@
         addon_classes = []
         for module in openpype_modules:
-            # Go through globals in `pype.modules`
+            # Go through globals in `ayon_core.modules`
             for name in dir(module):
                 modules_item = getattr(module, name, None)
                 # Filter globals that are not classes which inherit from
@@ -1077,7 +1131,7 @@
         """Print out report of time spent on addons initialization parts.
 
         Reporting is not automated and must be implemented for each initialization
-        part separatelly. Reports must be stored to `_report` attribute.
+        part separately. Reports must be stored to `_report` attribute.
         Print is skipped if `_report` is empty.
 
         Attribute `_report` is dictionary where key is "label" describing
@@ -1269,7 +1323,7 @@
     def add_doubleclick_callback(self, addon, callback):
        """Register doubleclick callbacks on tray icon.
 
-        Currently there is no way how to determine which is launched. Name of
+        Currently, there is no way to determine which one is launched. Name of
         callback can be defined with `doubleclick_callback` attribute.
 
         Missing feature: how to define a default callback.
diff --git a/client/ayon_core/cli.py b/client/ayon_core/cli.py
index 88b574da76..bd47dc1aac 100644
--- a/client/ayon_core/cli.py
+++ b/client/ayon_core/cli.py
@@ -4,6 +4,7 @@
 import os
 import sys
 import code
 import traceback
+from pathlib import Path
 
 import click
 import acre
@@ -11,6 +12,7 @@ import acre
 
 from ayon_core import AYON_CORE_ROOT
 from ayon_core.addon import AddonsManager
 from ayon_core.settings import get_general_environments
+from ayon_core.lib import initialize_ayon_connection, is_running_from_build
 
 from .cli_commands import Commands
 
@@ -80,7 +82,7 @@ main_cli.set_alias("addon", "module")
 @main_cli.command()
 @click.argument("output_json_path")
 @click.option("--project", help="Project name", default=None)
-@click.option("--asset", help="Asset name", default=None)
+@click.option("--asset", help="Folder path", default=None)
 @click.option("--task", help="Task name", default=None)
 @click.option("--app", help="Application name", default=None)
 @click.option(
@@ -95,6 +97,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup):
     environments will be extracted.
 
     Context options are "project", "asset", "task", "app"
+
+    Deprecated:
+        This function is deprecated and will be removed in the future. Please
+        use 'addon applications extractenvironments ...' instead.
     """
     Commands.extractenvironments(
         output_json_path, project, asset, task, app, envgroup
     )
 
 
 @main_cli.command()
-@click.argument("paths", nargs=-1)
-@click.option("-t", "--targets", help="Targets module", default=None,
+@click.argument("path", required=True)
+@click.option("-t", "--targets", help="Targets", default=None,
               multiple=True)
 @click.option("-g", "--gui", is_flag=True,
               help="Show Publish UI", default=False)
-def publish(paths, targets, gui):
+def publish(path, targets, gui):
     """Start CLI publishing.
 
-    Publish collects json from paths provided as an argument.
-    More than one path is allowed.
+    Publish collects JSON from the path provided as an argument.
     """
-
-    Commands.publish(list(paths), targets, gui)
+    Commands.publish(path, targets, gui)
 
 
 @main_cli.command(context_settings={"ignore_unknown_options": True})
 def publish_report_viewer():
 
 
 @main_cli.command()
 @click.argument("output_path")
 @click.option("--project", help="Define project context")
-@click.option("--asset", help="Define asset in project (project must be set)")
+@click.option("--folder", help="Define folder in project (project must be set)")
 @click.option(
     "--strict",
     is_flag=True,
@@ -136,18 +141,18 @@
 def contextselection(
     output_path,
     project,
-    asset,
+    folder,
     strict
 ):
     """Show Qt dialog to select context.
 
-    Context is project name, asset name and task name. The result is stored
+    Context is project name, folder path and task name. The result is stored
     into a JSON file whose path is passed as the first argument.
     """
     Commands.contextselection(
         output_path,
         project,
-        asset,
+        folder,
         strict
     )
 
 
 @main_cli.command()
 @click.argument("script")
 def run(script):
 
     if not script:
         print("Error: missing path to script file.")
+        return
+
+    # Remove first argument if it is the same as the AYON executable
+    # - Forward compatibility with future AYON versions.
+    # - The current AYON launcher keeps the executable as the first argument,
+    #     but future versions might remove it.
+    first_arg = sys.argv[0]
+    if is_running_from_build():
+        comp_path = os.getenv("AYON_EXECUTABLE")
     else:
+        comp_path = os.path.join(os.environ["AYON_ROOT"], "start.py")
+    # Compare paths, and remove the first argument if it is the AYON executable
+    if Path(first_arg).resolve() == Path(comp_path).resolve():
+        sys.argv.pop(0)
 
-        args = sys.argv
-        args.remove("run")
-        args.remove(script)
-        sys.argv = args
+    # Remove 'run' command from sys.argv
+    sys.argv.remove("run")
 
-        args_string = " ".join(args[1:])
-        print(f"... running: {script} {args_string}")
-        runpy.run_path(script, run_name="__main__", )
+    args_string = " ".join(sys.argv[1:])
+    print(f"... running: {script} {args_string}")
+    runpy.run_path(script, run_name="__main__")
 
 
 @main_cli.command()
@@ -243,6 +259,7 @@ def _set_addons_environments():
 
 
 def main(*args, **kwargs):
+    initialize_ayon_connection()
     python_path = os.getenv("PYTHONPATH", "")
     split_paths = python_path.split(os.pathsep)
diff --git a/client/ayon_core/cli_commands.py b/client/ayon_core/cli_commands.py
index a24710aef2..0fb18be687 100644
--- a/client/ayon_core/cli_commands.py
+++ b/client/ayon_core/cli_commands.py
@@ -2,7 +2,7 @@
 """Implementation of AYON commands."""
 import os
 import sys
-import json
+import warnings
 
 
 class Commands:
@@ -41,38 +41,35 @@
         return click_func
 
     @staticmethod
-    def publish(paths, targets=None, gui=False):
+    def publish(path: str, targets: list = None, gui: bool = False) -> None:
         """Start headless publishing.
 
-        Publish use json from passed paths argument.
+        Publish uses JSON from the passed path argument.
 
         Args:
-            paths (list): Paths to jsons.
-            targets (string): What module should be targeted
-                (to choose validator for example)
+            path (str): Path to JSON.
+            targets (list of str): List of pyblish targets.
             gui (bool): Show publish UI.
 
         Raises:
             RuntimeError: When there is no path to process.
-        """
+            RuntimeError: When executed with a list of JSON paths.
+        """
         from ayon_core.lib import Logger
-        from ayon_core.lib.applications import (
-            get_app_environments_for_context,
-            LaunchTypes,
-        )
+        from ayon_core.addon import AddonsManager
         from ayon_core.pipeline import (
             install_ayon_plugins,
             get_global_context,
         )
-        from ayon_core.tools.utils.host_tools import show_publish
-        from ayon_core.tools.utils.lib import qt_app_context
 
         # Register target and host
-        import pyblish.api
         import pyblish.util
 
+        if not isinstance(path, str):
+            raise RuntimeError("Path to JSON must be a string.")
+
         # Fix older jobs
         for src_key, dst_key in (
             ("AVALON_PROJECT", "AYON_PROJECT_NAME"),
@@ -95,21 +92,16 @@
 
         publish_paths = manager.collect_plugin_paths()["publish"]
 
-        for path in publish_paths:
-            pyblish.api.register_plugin_path(path)
+        for plugin_path in publish_paths:
+            pyblish.api.register_plugin_path(plugin_path)
 
-        if not any(paths):
-            raise RuntimeError("No publish paths specified")
-
-        app_full_name = os.getenv("AYON_APP_NAME")
-        if app_full_name:
+        applications_addon = manager.get_enabled_addon("applications")
+        if applications_addon is not None:
             context = get_global_context()
-            env = get_app_environments_for_context(
+            env = applications_addon.get_farm_publish_environment_variables(
                 context["project_name"],
                 context["folder_path"],
                 context["task_name"],
-                app_full_name,
-                launch_type=LaunchTypes.farm_publish,
             )
             os.environ.update(env)
 
@@ -122,7 +114,7 @@
         else:
             pyblish.api.register_target("farm")
 
-        os.environ["AYON_PUBLISH_DATA"] = os.pathsep.join(paths)
+        os.environ["AYON_PUBLISH_DATA"] = path
         os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib
 
         log.info("Running publish ...")
@@ -133,6 +125,8 @@
             print(plugin)
 
         if gui:
+            from ayon_core.tools.utils.host_tools import show_publish
+            from ayon_core.tools.utils.lib import qt_app_context
             with qt_app_context():
                 show_publish()
         else:
@@ -149,39 +143,39 @@
         log.info("Publish finished.")
 
     @staticmethod
-    def extractenvironments(output_json_path, project, asset, task, app,
-                            env_group):
+    def extractenvironments(
+        output_json_path, project, asset, task, app, env_group
+    ):
         """Produces json file with environment based on project and app.
 
         Called by Deadline plugin to propagate environment into render jobs.
         """
-        from ayon_core.lib.applications import (
-            get_app_environments_for_context,
-            LaunchTypes,
+        from ayon_core.addon import AddonsManager
+
+        warnings.warn(
+            (
+                "Command 'extractenvironments' is deprecated and will be"
+                " removed in the future. Please use "
+                "'addon applications extractenvironments ...' instead."
+            ),
+            DeprecationWarning
         )
-        if all((project, asset, task, app)):
-            env = get_app_environments_for_context(
-                project,
-                asset,
-                task,
-                app,
-                env_group=env_group,
-                launch_type=LaunchTypes.farm_render
+        addons_manager = AddonsManager()
+        applications_addon = addons_manager.get_enabled_addon("applications")
+        if applications_addon is None:
+            raise RuntimeError(
+                "Applications addon is not available or enabled."
) - else: - env = os.environ.copy() - output_dir = os.path.dirname(output_json_path) - if not os.path.exists(output_dir): - os.makedirs(output_dir) - - with open(output_json_path, "w") as file_stream: - json.dump(env, file_stream, indent=4) + # Please ignore the fact this is using private method + applications_addon._cli_extract_environments( + output_json_path, project, asset, task, app, env_group + ) @staticmethod - def contextselection(output_path, project_name, asset_name, strict): + def contextselection(output_path, project_name, folder_path, strict): from ayon_core.tools.context_dialog import main - main(output_path, project_name, asset_name, strict) + main(output_path, project_name, folder_path, strict) diff --git a/client/ayon_core/client/__init__.py b/client/ayon_core/client/__init__.py deleted file mode 100644 index 00f4d9863f..0000000000 --- a/client/ayon_core/client/__init__.py +++ /dev/null @@ -1,110 +0,0 @@ -from .utils import get_ayon_server_api_connection - -from .entities import ( - get_projects, - get_project, - get_whole_project, - - get_asset_by_id, - get_asset_by_name, - get_assets, - get_archived_assets, - get_asset_ids_with_subsets, - - get_subset_by_id, - get_subset_by_name, - get_subsets, - get_subset_families, - - get_version_by_id, - get_version_by_name, - get_versions, - get_hero_version_by_id, - get_hero_version_by_subset_id, - get_hero_versions, - get_last_versions, - get_last_version_by_subset_id, - get_last_version_by_subset_name, - get_output_link_versions, - - version_is_latest, - - get_representation_by_id, - get_representation_by_name, - get_representations, - get_representation_parents, - get_representations_parents, - get_archived_representations, - - get_thumbnail, - get_thumbnails, - get_thumbnail_id_from_source, - - get_workfile_info, - - get_asset_name_identifier, -) - -from .entity_links import ( - get_linked_asset_ids, - get_linked_assets, - get_linked_representation_id, -) - -from .operations import ( - create_project, -) - - -__all__ = ( - "get_ayon_server_api_connection", - - "get_projects", - "get_project", - "get_whole_project", - - "get_asset_by_id", - "get_asset_by_name", - "get_assets", - "get_archived_assets", - "get_asset_ids_with_subsets", - - "get_subset_by_id", - "get_subset_by_name", - "get_subsets", - "get_subset_families", - - "get_version_by_id", - "get_version_by_name", - "get_versions", - "get_hero_version_by_id", - "get_hero_version_by_subset_id", - "get_hero_versions", - "get_last_versions", - "get_last_version_by_subset_id", - "get_last_version_by_subset_name", - "get_output_link_versions", - - "version_is_latest", - - "get_representation_by_id", - "get_representation_by_name", - "get_representations", - "get_representation_parents", - "get_representations_parents", - "get_archived_representations", - - "get_thumbnail", - "get_thumbnails", - "get_thumbnail_id_from_source", - - "get_workfile_info", - - "get_linked_asset_ids", - "get_linked_assets", - "get_linked_representation_id", - - "create_project", - - "get_asset_name_identifier", -) diff --git a/client/ayon_core/client/constants.py b/client/ayon_core/client/constants.py deleted file mode 100644 index 379c0d665f..0000000000 --- a/client/ayon_core/client/constants.py +++ /dev/null @@ -1,28 +0,0 @@ -# --- Folders --- -DEFAULT_FOLDER_FIELDS = { - "id", - "name", - "path", - "parentId", - "active", - "parents", - "thumbnailId" -} - -REPRESENTATION_FILES_FIELDS = { - "files.name", - "files.hash", - "files.id", - "files.path", - "files.size", -} - -CURRENT_PROJECT_SCHEMA = 
"openpype:project-3.0" -CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0" -CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0" -CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0" -CURRENT_VERSION_SCHEMA = "openpype:version-3.0" -CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0" -CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0" -CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0" -CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0" diff --git a/client/ayon_core/client/conversion_utils.py b/client/ayon_core/client/conversion_utils.py deleted file mode 100644 index 192eb194db..0000000000 --- a/client/ayon_core/client/conversion_utils.py +++ /dev/null @@ -1,1362 +0,0 @@ -import os -import arrow -import collections -import json - -import six - -from ayon_core.client.operations_base import REMOVED_VALUE -from .constants import ( - CURRENT_PROJECT_SCHEMA, - CURRENT_ASSET_DOC_SCHEMA, - CURRENT_SUBSET_SCHEMA, - CURRENT_VERSION_SCHEMA, - CURRENT_HERO_VERSION_SCHEMA, - CURRENT_REPRESENTATION_SCHEMA, - CURRENT_WORKFILE_INFO_SCHEMA, - REPRESENTATION_FILES_FIELDS, -) -from .utils import create_entity_id, prepare_entity_changes - -# --- Project entity --- -PROJECT_FIELDS_MAPPING_V3_V4 = { - "_id": {"name"}, - "name": {"name"}, - "data": {"data", "code"}, - "data.library_project": {"library"}, - "data.code": {"code"}, - "data.active": {"active"}, -} - -# TODO this should not be hardcoded but received from server!!! -# --- Folder entity --- -FOLDER_FIELDS_MAPPING_V3_V4 = { - "_id": {"id"}, - "name": {"name"}, - "label": {"label"}, - "data": { - "parentId", "parents", "active", "tasks", "thumbnailId" - }, - "data.visualParent": {"parentId"}, - "data.parents": {"parents"}, - "data.active": {"active"}, - "data.thumbnail_id": {"thumbnailId"}, - "data.entityType": {"folderType"} -} - -# --- Subset entity --- -SUBSET_FIELDS_MAPPING_V3_V4 = { - "_id": {"id"}, - "name": {"name"}, - "data.active": {"active"}, - "parent": {"folderId"} -} - -# --- Version entity --- -VERSION_FIELDS_MAPPING_V3_V4 = { - "_id": {"id"}, - "name": {"version"}, - "parent": {"productId"} -} - -# --- Representation entity --- -REPRESENTATION_FIELDS_MAPPING_V3_V4 = { - "_id": {"id"}, - "name": {"name"}, - "parent": {"versionId"}, - "context": {"context"}, - "files": {"files"}, -} - - -def project_fields_v3_to_v4(fields, con): - """Convert project fields from v3 to v4 structure. - - Args: - fields (Union[Iterable(str), None]): fields to be converted. - - Returns: - Union[Set(str), None]: Converted fields to v4 fields. 
- """ - - # TODO config fields - # - config.apps - # - config.groups - if not fields: - return None - - project_attribs = con.get_attributes_for_type("project") - output = set() - for field in fields: - # If config is needed the rest api call must be used - if field.startswith("config"): - return None - - if field in PROJECT_FIELDS_MAPPING_V3_V4: - output |= PROJECT_FIELDS_MAPPING_V3_V4[field] - if field == "data": - output |= { - "attrib.{}".format(attr) - for attr in project_attribs - } - - elif field.startswith("data"): - field_parts = field.split(".") - field_parts.pop(0) - data_key = ".".join(field_parts) - if data_key in project_attribs: - output.add("attrib.{}".format(data_key)) - else: - output.add("data") - print("Requested specific key from data {}".format(data_key)) - - else: - raise ValueError("Unknown field mapping for {}".format(field)) - - if "name" not in output: - output.add("name") - return output - - -def _get_default_template_name(templates): - default_template = None - for name, template in templates.items(): - if name == "default": - return "default" - - if default_template is None: - default_template = name - - return default_template - - -def _template_replacements_to_v3(template): - return ( - template - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", "{family}") - ) - - -def _convert_template_item(template_item): - for key, value in tuple(template_item.items()): - template_item[key] = _template_replacements_to_v3(value) - - # Change 'directory' to 'folder' - if "directory" in template_item: - template_item["folder"] = template_item.pop("directory") - - if ( - "path" not in template_item - and "file" in template_item - and "folder" in template_item - ): - template_item["path"] = "/".join( - (template_item["folder"], template_item["file"]) - ) - - -def _fill_template_category(templates, cat_templates, cat_key): - default_template_name = _get_default_template_name(cat_templates) - for template_name, cat_template in cat_templates.items(): - _convert_template_item(cat_template) - if template_name == default_template_name: - templates[cat_key] = cat_template - else: - new_name = "{}_{}".format(cat_key, template_name) - templates["others"][new_name] = cat_template - - -def convert_v4_project_to_v3(project): - """Convert Project entity data from v4 structure to v3 structure. - - Args: - project (Dict[str, Any]): Project entity queried from v4 server. - - Returns: - Dict[str, Any]: Project converted to v3 structure. 
- """ - - if not project: - return project - - project_name = project["name"] - output = { - "_id": project_name, - "name": project_name, - "schema": CURRENT_PROJECT_SCHEMA, - "type": "project" - } - - data = project.get("data") or {} - attribs = project.get("attrib") or {} - apps_attr = attribs.pop("applications", None) or [] - applications = [ - {"name": app_name} - for app_name in apps_attr - ] - data.update(attribs) - if "tools" in data: - data["tools_env"] = data.pop("tools") - - data["entityType"] = "Project" - - config = {} - project_config = project.get("config") - - if project_config: - config["apps"] = applications - config["roots"] = project_config["roots"] - - templates = project_config["templates"] - templates["defaults"] = templates.pop("common", None) or {} - - others_templates = templates.pop("others", None) or {} - new_others_templates = {} - templates["others"] = new_others_templates - for name, template in others_templates.items(): - _convert_template_item(template) - new_others_templates[name] = template - - staging_templates = templates.pop("staging", None) - # Key 'staging_directories' is legacy key that changed - # to 'staging_dir' - _legacy_staging_templates = templates.pop("staging_directories", None) - if staging_templates is None: - staging_templates = _legacy_staging_templates - - if staging_templates is None: - staging_templates = {} - - # Prefix all staging template names with 'staging_' prefix - # and add them to 'others' - for name, template in staging_templates.items(): - _convert_template_item(template) - new_name = "staging_{}".format(name) - new_others_templates[new_name] = template - - for key in ( - "work", - "publish", - "hero", - ): - cat_templates = templates.pop(key) - _fill_template_category(templates, cat_templates, key) - - delivery_templates = templates.pop("delivery", None) or {} - new_delivery_templates = {} - for name, delivery_template in delivery_templates.items(): - new_delivery_templates[name] = "/".join( - (delivery_template["directory"], delivery_template["file"]) - ) - templates["delivery"] = new_delivery_templates - - config["templates"] = templates - - if "taskTypes" in project: - task_types = project["taskTypes"] - new_task_types = {} - for task_type in task_types: - name = task_type.pop("name") - # Change 'shortName' to 'short_name' - task_type["short_name"] = task_type.pop("shortName", None) - new_task_types[name] = task_type - - config["tasks"] = new_task_types - - if config: - output["config"] = config - - for data_key, key in ( - ("library_project", "library"), - ("code", "code"), - ("active", "active") - ): - if key in project: - data[data_key] = project[key] - - if "attrib" in project: - for key, value in project["attrib"].items(): - data[key] = value - - if data: - output["data"] = data - return output - - -def folder_fields_v3_to_v4(fields, con): - """Convert folder fields from v3 to v4 structure. - - Args: - fields (Union[Iterable(str), None]): fields to be converted. - - Returns: - Union[Set(str), None]: Converted fields to v4 fields. 
- """ - - if not fields: - return None - - folder_attributes = con.get_attributes_for_type("folder") - output = set() - for field in fields: - if field in ("schema", "type", "parent"): - continue - - if field in FOLDER_FIELDS_MAPPING_V3_V4: - output |= FOLDER_FIELDS_MAPPING_V3_V4[field] - if field == "data": - output |= { - "attrib.{}".format(attr) - for attr in folder_attributes - } - - elif field.startswith("data"): - field_parts = field.split(".") - field_parts.pop(0) - data_key = ".".join(field_parts) - if data_key == "label": - output.add("name") - - elif data_key in ("icon", "color"): - continue - - elif data_key.startswith("tasks"): - output.add("tasks") - - elif data_key in folder_attributes: - output.add("attrib.{}".format(data_key)) - - else: - output.add("data") - print("Requested specific key from data {}".format(data_key)) - - else: - raise ValueError("Unknown field mapping for {}".format(field)) - - if "id" not in output: - output.add("id") - return output - - -def convert_v4_tasks_to_v3(tasks): - """Convert v4 task item to v3 task. - - Args: - tasks (List[Dict[str, Any]]): Task entites. - - Returns: - Dict[str, Dict[str, Any]]: Tasks in v3 variant ready for v3 asset. - """ - - output = {} - for task in tasks: - task_name = task["name"] - new_task = { - "type": task["taskType"] - } - output[task_name] = new_task - return output - - -def convert_v4_folder_to_v3(folder, project_name): - """Convert v4 folder to v3 asset. - - Args: - folder (Dict[str, Any]): Folder entity data. - project_name (str): Project name from which folder was queried. - - Returns: - Dict[str, Any]: Converted v4 folder to v3 asset. - """ - - output = { - "_id": folder["id"], - "parent": project_name, - "type": "asset", - "schema": CURRENT_ASSET_DOC_SCHEMA - } - - output_data = folder.get("data") or {} - - if "name" in folder: - output["name"] = folder["name"] - output_data["label"] = folder["name"] - - if "folderType" in folder: - output_data["entityType"] = folder["folderType"] - - for src_key, dst_key in ( - ("parentId", "visualParent"), - ("active", "active"), - ("thumbnailId", "thumbnail_id"), - ("parents", "parents"), - ): - if src_key in folder: - output_data[dst_key] = folder[src_key] - - if "attrib" in folder: - output_data.update(folder["attrib"]) - - if "tools" in output_data: - output_data["tools_env"] = output_data.pop("tools") - - if "tasks" in folder: - output_data["tasks"] = convert_v4_tasks_to_v3(folder["tasks"]) - - output["data"] = output_data - - return output - - -def subset_fields_v3_to_v4(fields, con): - """Convert subset fields from v3 to v4 structure. - - Args: - fields (Union[Iterable(str), None]): fields to be converted. - - Returns: - Union[Set(str), None]: Converted fields to v4 fields. 
- """ - - if not fields: - return None - - product_attributes = con.get_attributes_for_type("product") - - output = set() - for field in fields: - if field in ("schema", "type"): - continue - - if field in SUBSET_FIELDS_MAPPING_V3_V4: - output |= SUBSET_FIELDS_MAPPING_V3_V4[field] - - elif field == "data": - output.add("productType") - output.add("active") - output |= { - "attrib.{}".format(attr) - for attr in product_attributes - } - - elif field.startswith("data"): - field_parts = field.split(".") - field_parts.pop(0) - data_key = ".".join(field_parts) - if data_key in ("family", "families"): - output.add("productType") - - elif data_key in product_attributes: - output.add("attrib.{}".format(data_key)) - - else: - output.add("data") - print("Requested specific key from data {}".format(data_key)) - - else: - raise ValueError("Unknown field mapping for {}".format(field)) - - if "id" not in output: - output.add("id") - return output - - -def convert_v4_subset_to_v3(subset): - output = { - "_id": subset["id"], - "type": "subset", - "schema": CURRENT_SUBSET_SCHEMA - } - if "folderId" in subset: - output["parent"] = subset["folderId"] - - output_data = subset.get("data") or {} - - if "name" in subset: - output["name"] = subset["name"] - - if "active" in subset: - output_data["active"] = subset["active"] - - if "attrib" in subset: - attrib = subset["attrib"] - if "productGroup" in attrib: - attrib["subsetGroup"] = attrib.pop("productGroup") - output_data.update(attrib) - - family = subset.get("productType") - if family: - output_data["family"] = family - output_data["families"] = [family] - - output["data"] = output_data - - return output - - -def version_fields_v3_to_v4(fields, con): - """Convert version fields from v3 to v4 structure. - - Args: - fields (Union[Iterable(str), None]): fields to be converted. - - Returns: - Union[Set(str), None]: Converted fields to v4 fields. - """ - - if not fields: - return None - - version_attributes = con.get_attributes_for_type("version") - - output = set() - for field in fields: - if field in ("type", "schema", "version_id"): - continue - - if field in VERSION_FIELDS_MAPPING_V3_V4: - output |= VERSION_FIELDS_MAPPING_V3_V4[field] - - elif field == "data": - output |= { - "attrib.{}".format(attr) - for attr in version_attributes - } - output |= { - "author", - "createdAt", - "thumbnailId", - } - - elif field.startswith("data"): - field_parts = field.split(".") - field_parts.pop(0) - data_key = ".".join(field_parts) - if data_key in version_attributes: - output.add("attrib.{}".format(data_key)) - - elif data_key == "thumbnail_id": - output.add("thumbnailId") - - elif data_key == "time": - output.add("createdAt") - - elif data_key == "author": - output.add("author") - - elif data_key in ("tags", ): - continue - - else: - output.add("data") - print("Requested specific key from data {}".format(data_key)) - - else: - raise ValueError("Unknown field mapping for {}".format(field)) - - if "id" not in output: - output.add("id") - return output - - -def convert_v4_version_to_v3(version): - """Convert v4 version entity to v4 version. - - Args: - version (Dict[str, Any]): Queried v4 version entity. - - Returns: - Dict[str, Any]: Conveted version entity to v3 structure. 
- """ - - version_num = version["version"] - if version_num < 0: - output = { - "_id": version["id"], - "type": "hero_version", - "schema": CURRENT_HERO_VERSION_SCHEMA, - } - if "productId" in version: - output["parent"] = version["productId"] - - if "data" in version: - output["data"] = version["data"] - return output - - output = { - "_id": version["id"], - "type": "version", - "name": version_num, - "schema": CURRENT_VERSION_SCHEMA - } - if "productId" in version: - output["parent"] = version["productId"] - - output_data = version.get("data") or {} - if "attrib" in version: - output_data.update(version["attrib"]) - - for src_key, dst_key in ( - ("active", "active"), - ("thumbnailId", "thumbnail_id"), - ("author", "author") - ): - if src_key in version: - output_data[dst_key] = version[src_key] - - if "createdAt" in version: - created_at = arrow.get(version["createdAt"]).to("local") - output_data["time"] = created_at.strftime("%Y%m%dT%H%M%SZ") - - output["data"] = output_data - - return output - - -def representation_fields_v3_to_v4(fields, con): - """Convert representation fields from v3 to v4 structure. - - Args: - fields (Union[Iterable(str), None]): fields to be converted. - - Returns: - Union[Set(str), None]: Converted fields to v4 fields. - """ - - if not fields: - return None - - representation_attributes = con.get_attributes_for_type("representation") - - output = set() - for field in fields: - if field in ("type", "schema"): - continue - - if field in REPRESENTATION_FIELDS_MAPPING_V3_V4: - output |= REPRESENTATION_FIELDS_MAPPING_V3_V4[field] - - elif field.startswith("context"): - output.add("context") - - # TODO: 'files' can have specific attributes but the keys in v3 and v4 - # are not the same (content is not the same) - elif field.startswith("files"): - output |= REPRESENTATION_FILES_FIELDS - - elif field.startswith("data"): - output |= { - "attrib.{}".format(attr) - for attr in representation_attributes - } - - else: - raise ValueError("Unknown field mapping for {}".format(field)) - - if "id" not in output: - output.add("id") - return output - - -def convert_v4_representation_to_v3(representation): - """Convert v4 representation to v3 representation. - - Args: - representation (Dict[str, Any]): Queried representation from v4 server. - - Returns: - Dict[str, Any]: Converted representation to v3 structure. 
- """ - - output = { - "type": "representation", - "schema": CURRENT_REPRESENTATION_SCHEMA, - } - if "id" in representation: - output["_id"] = representation["id"] - - for v3_key, v4_key in ( - ("name", "name"), - ("parent", "versionId") - ): - if v4_key in representation: - output[v3_key] = representation[v4_key] - - if "context" in representation: - context = representation["context"] - if isinstance(context, six.string_types): - context = json.loads(context) - - if "asset" not in context and "folder" in context: - _c_folder = context["folder"] - context["asset"] = _c_folder["name"] - - elif "asset" in context and "folder" not in context: - context["folder"] = {"name": context["asset"]} - - if "product" in context: - _c_product = context.pop("product") - context["family"] = _c_product["type"] - context["subset"] = _c_product["name"] - - output["context"] = context - - if "files" in representation: - files = representation["files"] - new_files = [] - # From GraphQl is list - if isinstance(files, list): - for file_info in files: - file_info["_id"] = file_info["id"] - new_files.append(file_info) - - # From RestPoint is dictionary - elif isinstance(files, dict): - for file_id, file_info in files: - file_info["_id"] = file_id - new_files.append(file_info) - - for file_info in new_files: - if not file_info.get("sites"): - file_info["sites"] = [{ - "name": "studio" - }] - - output["files"] = new_files - - if representation.get("active") is False: - output["type"] = "archived_representation" - output["old_id"] = output["_id"] - - output_data = representation.get("data") or {} - if "attrib" in representation: - output_data.update(representation["attrib"]) - - for key, data_key in ( - ("active", "active"), - ): - if key in representation: - output_data[data_key] = representation[key] - - if "template" in output_data: - output_data["template"] = ( - output_data["template"] - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", "{family}") - ) - - output["data"] = output_data - - return output - - -def workfile_info_fields_v3_to_v4(fields): - if not fields: - return None - - new_fields = set() - fields = set(fields) - for v3_key, v4_key in ( - ("_id", "id"), - ("files", "path"), - ("filename", "name"), - ("data", "data"), - ): - if v3_key in fields: - new_fields.add(v4_key) - - if "parent" in fields or "task_name" in fields: - new_fields.add("taskId") - - return new_fields - - -def convert_v4_workfile_info_to_v3(workfile_info, task): - output = { - "type": "workfile", - "schema": CURRENT_WORKFILE_INFO_SCHEMA, - } - if "id" in workfile_info: - output["_id"] = workfile_info["id"] - - if "path" in workfile_info: - output["files"] = [workfile_info["path"]] - - if "name" in workfile_info: - output["filename"] = workfile_info["name"] - - if "taskId" in workfile_info: - output["task_name"] = task["name"] - output["parent"] = task["folderId"] - - return output - - -def convert_create_asset_to_v4(asset, project, con): - folder_attributes = con.get_attributes_for_type("folder") - - asset_data = asset["data"] - parent_id = asset_data["visualParent"] - - folder = { - "name": asset["name"], - "parentId": parent_id, - } - entity_id = asset.get("_id") - if entity_id: - folder["id"] = entity_id - - attribs = {} - data = {} - for key, value in asset_data.items(): - if key in ( - "visualParent", - "thumbnail_id", - "parents", - "inputLinks", - "avalon_mongo_id", - ): - continue - - if key not in folder_attributes: - data[key] = value - elif value is not None: - attribs[key] = value - - if attribs: - 
folder["attrib"] = attribs - - if data: - folder["data"] = data - return folder - - -def convert_create_task_to_v4(task, project, con): - if not project["taskTypes"]: - raise ValueError( - "Project \"{}\" does not have any task types".format( - project["name"])) - - task_type = task["type"] - if task_type not in project["taskTypes"]: - task_type = tuple(project["taskTypes"].keys())[0] - - return { - "name": task["name"], - "taskType": task_type, - "folderId": task["folderId"] - } - - -def convert_create_subset_to_v4(subset, con): - product_attributes = con.get_attributes_for_type("product") - - subset_data = subset["data"] - product_type = subset_data.get("family") - if not product_type: - product_type = subset_data["families"][0] - - converted_product = { - "name": subset["name"], - "productType": product_type, - "folderId": subset["parent"], - } - entity_id = subset.get("_id") - if entity_id: - converted_product["id"] = entity_id - - attribs = {} - data = {} - if "subsetGroup" in subset_data: - subset_data["productGroup"] = subset_data.pop("subsetGroup") - for key, value in subset_data.items(): - if key not in product_attributes: - data[key] = value - elif value is not None: - attribs[key] = value - - if attribs: - converted_product["attrib"] = attribs - - if data: - converted_product["data"] = data - - return converted_product - - -def convert_create_version_to_v4(version, con): - version_attributes = con.get_attributes_for_type("version") - converted_version = { - "version": version["name"], - "productId": version["parent"], - } - entity_id = version.get("_id") - if entity_id: - converted_version["id"] = entity_id - - version_data = version["data"] - attribs = {} - data = {} - for key, value in version_data.items(): - if key not in version_attributes: - data[key] = value - elif value is not None: - attribs[key] = value - - if attribs: - converted_version["attrib"] = attribs - - if data: - converted_version["data"] = attribs - - return converted_version - - -def convert_create_hero_version_to_v4(hero_version, project_name, con): - if "version_id" in hero_version: - version_id = hero_version["version_id"] - version = con.get_version_by_id(project_name, version_id) - version["version"] = - version["version"] - - for auto_key in ( - "name", - "createdAt", - "updatedAt", - "author", - ): - version.pop(auto_key, None) - - return version - - version_attributes = con.get_attributes_for_type("version") - converted_version = { - "version": hero_version["version"], - "productId": hero_version["parent"], - } - entity_id = hero_version.get("_id") - if entity_id: - converted_version["id"] = entity_id - - version_data = hero_version["data"] - attribs = {} - data = {} - for key, value in version_data.items(): - if key not in version_attributes: - data[key] = value - elif value is not None: - attribs[key] = value - - if attribs: - converted_version["attrib"] = attribs - - if data: - converted_version["data"] = attribs - - return converted_version - - -def convert_create_representation_to_v4(representation, con): - representation_attributes = con.get_attributes_for_type("representation") - - converted_representation = { - "name": representation["name"], - "versionId": representation["parent"], - } - entity_id = representation.get("_id") - if entity_id: - converted_representation["id"] = entity_id - - if representation.get("type") == "archived_representation": - converted_representation["active"] = False - - new_files = [] - for file_item in representation["files"]: - new_file_item = { - key: value - for 
key, value in file_item.items() - if key in ("hash", "path", "size") - } - new_file_item.update({ - "id": create_entity_id(), - "hash_type": "op3", - "name": os.path.basename(new_file_item["path"]) - }) - new_files.append(new_file_item) - - converted_representation["files"] = new_files - - context = representation["context"] - if "folder" not in context: - context["folder"] = { - "name": context.get("asset") - } - - context["product"] = { - "type": context.pop("family", None), - "name": context.pop("subset", None), - } - - attribs = {} - data = { - "context": context, - } - - representation_data = representation["data"] - representation_data["template"] = ( - representation_data["template"] - .replace("{subset}", "{product[name]}") - .replace("{family}", "{product[type]}") - ) - - for key, value in representation_data.items(): - if key not in representation_attributes: - data[key] = value - elif value is not None: - attribs[key] = value - - if attribs: - converted_representation["attrib"] = attribs - - if data: - converted_representation["data"] = data - - return converted_representation - - -def convert_create_workfile_info_to_v4(data, project_name, con): - folder_id = data["parent"] - task_name = data["task_name"] - task = con.get_task_by_name(project_name, folder_id, task_name) - if not task: - return None - - workfile_attributes = con.get_attributes_for_type("workfile") - filename = data["filename"] - possible_attribs = { - "extension": os.path.splitext(filename)[-1] - } - attribs = {} - for attr in workfile_attributes: - if attr in possible_attribs: - attribs[attr] = possible_attribs[attr] - - output = { - "path": data["files"][0], - "name": filename, - "taskId": task["id"] - } - if "_id" in data: - output["id"] = data["_id"] - - if attribs: - output["attrib"] = attribs - - output_data = data.get("data") - if output_data: - output["data"] = output_data - return output - - -def _from_flat_dict(data): - output = {} - for key, value in data.items(): - output_value = output - subkeys = key.split(".") - last_key = subkeys.pop(-1) - for subkey in subkeys: - if subkey not in output_value: - output_value[subkey] = {} - output_value = output_value[subkey] - - output_value[last_key] = value - return output - - -def _to_flat_dict(data): - output = {} - flat_queue = collections.deque() - flat_queue.append(([], data)) - while flat_queue: - item = flat_queue.popleft() - parent_keys, data = item - for key, value in data.items(): - keys = list(parent_keys) - keys.append(key) - if isinstance(value, dict): - flat_queue.append((keys, value)) - else: - full_key = ".".join(keys) - output[full_key] = value - - return output - - -def convert_update_folder_to_v4(project_name, asset_id, update_data, con): - new_update_data = {} - - folder_attributes = con.get_attributes_for_type("folder") - full_update_data = _from_flat_dict(update_data) - data = full_update_data.get("data") - - has_new_parent = False - has_task_changes = False - parent_id = None - tasks = None - new_data = {} - attribs = full_update_data.pop("attrib", {}) - if "type" in update_data: - new_update_data["active"] = update_data["type"] == "asset" - - if data: - if "thumbnail_id" in data: - new_update_data["thumbnailId"] = data.pop("thumbnail_id") - - if "tasks" in data: - tasks = data.pop("tasks") - has_task_changes = True - - if "visualParent" in data: - has_new_parent = True - parent_id = data.pop("visualParent") - - for key, value in data.items(): - if key in folder_attributes: - attribs[key] = value - else: - new_data[key] = value - - if 
"name" in update_data: - new_update_data["name"] = update_data["name"] - - if "type" in update_data: - new_type = update_data["type"] - if new_type == "asset": - new_update_data["active"] = True - elif new_type == "archived_asset": - new_update_data["active"] = False - - if has_new_parent: - new_update_data["parentId"] = parent_id - - if new_data: - print("Folder has new data: {}".format(new_data)) - new_update_data["data"] = new_data - - if attribs: - new_update_data["attrib"] = attribs - - if has_task_changes: - raise ValueError("Task changes of folder are not implemented") - - return _to_flat_dict(new_update_data) - - -def convert_update_subset_to_v4(project_name, subset_id, update_data, con): - new_update_data = {} - - product_attributes = con.get_attributes_for_type("product") - full_update_data = _from_flat_dict(update_data) - data = full_update_data.get("data") - new_data = {} - attribs = full_update_data.pop("attrib", {}) - if data: - if "family" in data: - family = data.pop("family") - new_update_data["productType"] = family - - if "families" in data: - families = data.pop("families") - if "productType" not in new_update_data: - new_update_data["productType"] = families[0] - - if "subsetGroup" in data: - data["productGroup"] = data.pop("subsetGroup") - for key, value in data.items(): - if key in product_attributes: - if value is REMOVED_VALUE: - value = None - attribs[key] = value - - elif value is not REMOVED_VALUE: - new_data[key] = value - - if "name" in update_data: - new_update_data["name"] = update_data["name"] - - if "type" in update_data: - new_type = update_data["type"] - if new_type == "subset": - new_update_data["active"] = True - elif new_type == "archived_subset": - new_update_data["active"] = False - - if "parent" in update_data: - new_update_data["folderId"] = update_data["parent"] - - flat_data = _to_flat_dict(new_update_data) - if attribs: - flat_data["attrib"] = attribs - - if new_data: - print("Subset has new data: {}".format(new_data)) - flat_data["data"] = new_data - - return flat_data - - -def convert_update_version_to_v4(project_name, version_id, update_data, con): - new_update_data = {} - - version_attributes = con.get_attributes_for_type("version") - full_update_data = _from_flat_dict(update_data) - data = full_update_data.get("data") - new_data = {} - attribs = full_update_data.pop("attrib", {}) - if data: - if "author" in data: - new_update_data["author"] = data.pop("author") - - if "thumbnail_id" in data: - new_update_data["thumbnailId"] = data.pop("thumbnail_id") - - for key, value in data.items(): - if key in version_attributes: - if value is REMOVED_VALUE: - value = None - attribs[key] = value - - elif value is not REMOVED_VALUE: - new_data[key] = value - - if "name" in update_data: - new_update_data["version"] = update_data["name"] - - if "type" in update_data: - new_type = update_data["type"] - if new_type == "version": - new_update_data["active"] = True - elif new_type == "archived_version": - new_update_data["active"] = False - - if "parent" in update_data: - new_update_data["productId"] = update_data["parent"] - - flat_data = _to_flat_dict(new_update_data) - if attribs: - flat_data["attrib"] = attribs - - if new_data: - print("Version has new data: {}".format(new_data)) - flat_data["data"] = new_data - return flat_data - - -def convert_update_hero_version_to_v4( - project_name, hero_version_id, update_data, con -): - if "version_id" not in update_data: - return None - - version_id = update_data["version_id"] - hero_version = 
con.get_hero_version_by_id(project_name, hero_version_id) - version = con.get_version_by_id(project_name, version_id) - version["version"] = - version["version"] - version["id"] = hero_version_id - - for auto_key in ( - "name", - "createdAt", - "updatedAt", - "author", - ): - version.pop(auto_key, None) - - return prepare_entity_changes(hero_version, version) - - -def convert_update_representation_to_v4( - project_name, repre_id, update_data, con -): - new_update_data = {} - - folder_attributes = con.get_attributes_for_type("folder") - full_update_data = _from_flat_dict(update_data) - data = full_update_data.get("data") - - new_data = {} - attribs = full_update_data.pop("attrib", {}) - if data: - for key, value in data.items(): - if key in folder_attributes: - attribs[key] = value - else: - new_data[key] = value - - if "template" in attribs: - attribs["template"] = ( - attribs["template"] - .replace("{family}", "{product[type]}") - .replace("{subset}", "{product[name]}") - ) - - if "name" in update_data: - new_update_data["name"] = update_data["name"] - - if "type" in update_data: - new_type = update_data["type"] - if new_type == "representation": - new_update_data["active"] = True - elif new_type == "archived_representation": - new_update_data["active"] = False - - if "parent" in update_data: - new_update_data["versionId"] = update_data["parent"] - - if "context" in update_data: - context = update_data["context"] - if "folder" not in context and "asset" in context: - context["folder"] = {"name": context.pop("asset")} - - if "family" in context or "subset" in context: - context["product"] = { - "name": context.pop("subset"), - "type": context.pop("family"), - } - new_data["context"] = context - - if "files" in update_data: - new_files = update_data["files"] - if isinstance(new_files, dict): - new_files = list(new_files.values()) - - for item in new_files: - for key in tuple(item.keys()): - if key not in ("hash", "path", "size"): - item.pop(key) - item.update({ - "id": create_entity_id(), - "name": os.path.basename(item["path"]), - "hash_type": "op3", - }) - new_update_data["files"] = new_files - - flat_data = _to_flat_dict(new_update_data) - if attribs: - flat_data["attrib"] = attribs - - if new_data: - print("Representation has new data: {}".format(new_data)) - flat_data["data"] = new_data - - return flat_data - - -def convert_update_workfile_info_to_v4( - project_name, workfile_id, update_data, con -): - return { - key: value - for key, value in update_data.items() - if key.startswith("data") - } diff --git a/client/ayon_core/client/entities.py b/client/ayon_core/client/entities.py deleted file mode 100644 index 5ef2571421..0000000000 --- a/client/ayon_core/client/entities.py +++ /dev/null @@ -1,741 +0,0 @@ -import collections - -from .constants import CURRENT_THUMBNAIL_SCHEMA -from .utils import get_ayon_server_api_connection -from .openpype_comp import get_folders_with_tasks -from .conversion_utils import ( - project_fields_v3_to_v4, - convert_v4_project_to_v3, - - folder_fields_v3_to_v4, - convert_v4_folder_to_v3, - - subset_fields_v3_to_v4, - convert_v4_subset_to_v3, - - version_fields_v3_to_v4, - convert_v4_version_to_v3, - - representation_fields_v3_to_v4, - convert_v4_representation_to_v3, - - workfile_info_fields_v3_to_v4, - convert_v4_workfile_info_to_v3, -) - - -def get_asset_name_identifier(asset_doc): - """Get asset name identifier by asset document. - - This function is added because of AYON implementation where name - identifier is not just a name but full path. 
- - Asset document must have "name" key, and "data.parents" when in AYON mode. - - Args: - asset_doc (dict[str, Any]): Asset document. - """ - - parents = list(asset_doc["data"]["parents"]) - parents.append(asset_doc["name"]) - return "/" + "/".join(parents) - - -def get_projects(active=True, inactive=False, library=None, fields=None): - if not active and not inactive: - return - - if active and inactive: - active = None - elif active: - active = True - elif inactive: - active = False - - con = get_ayon_server_api_connection() - fields = project_fields_v3_to_v4(fields, con) - for project in con.get_projects(active, library, fields=fields): - yield convert_v4_project_to_v3(project) - - -def get_project(project_name, active=True, inactive=False, fields=None): - # Skip if both are disabled - con = get_ayon_server_api_connection() - fields = project_fields_v3_to_v4(fields, con) - return convert_v4_project_to_v3( - con.get_project(project_name, fields=fields) - ) - - -def get_whole_project(*args, **kwargs): - raise NotImplementedError("'get_whole_project' not implemented") - - -def _get_subsets( - project_name, - subset_ids=None, - subset_names=None, - folder_ids=None, - names_by_folder_ids=None, - archived=False, - fields=None -): - # Convert fields and add minimum required fields - con = get_ayon_server_api_connection() - fields = subset_fields_v3_to_v4(fields, con) - if fields is not None: - for key in ( - "id", - "active" - ): - fields.add(key) - - active = True - if archived: - active = None - - for subset in con.get_products( - project_name, - product_ids=subset_ids, - product_names=subset_names, - folder_ids=folder_ids, - names_by_folder_ids=names_by_folder_ids, - active=active, - fields=fields, - ): - yield convert_v4_subset_to_v3(subset) - - -def _get_versions( - project_name, - version_ids=None, - subset_ids=None, - versions=None, - hero=True, - standard=True, - latest=None, - active=None, - fields=None -): - con = get_ayon_server_api_connection() - - fields = version_fields_v3_to_v4(fields, con) - - # Make sure 'productId' and 'version' are available when hero versions - # are queried - if fields and hero: - fields = set(fields) - fields |= {"productId", "version"} - - queried_versions = con.get_versions( - project_name, - version_ids=version_ids, - product_ids=subset_ids, - versions=versions, - hero=hero, - standard=standard, - latest=latest, - active=active, - fields=fields - ) - - version_entities = [] - hero_versions = [] - for version in queried_versions: - if version["version"] < 0: - hero_versions.append(version) - else: - version_entities.append(convert_v4_version_to_v3(version)) - - if hero_versions: - subset_ids = set() - versions_nums = set() - for hero_version in hero_versions: - versions_nums.add(abs(hero_version["version"])) - subset_ids.add(hero_version["productId"]) - - hero_eq_versions = con.get_versions( - project_name, - product_ids=subset_ids, - versions=versions_nums, - hero=False, - fields=["id", "version", "productId"] - ) - hero_eq_by_subset_id = collections.defaultdict(list) - for version in hero_eq_versions: - hero_eq_by_subset_id[version["productId"]].append(version) - - for hero_version in hero_versions: - abs_version = abs(hero_version["version"]) - subset_id = hero_version["productId"] - version_id = None - for version in hero_eq_by_subset_id.get(subset_id, []): - if version["version"] == abs_version: - version_id = version["id"] - break - conv_hero = convert_v4_version_to_v3(hero_version) - conv_hero["version_id"] = version_id - 
version_entities.append(conv_hero) - - return version_entities - - -def get_asset_by_id(project_name, asset_id, fields=None): - assets = get_assets( - project_name, asset_ids=[asset_id], fields=fields - ) - for asset in assets: - return asset - return None - - -def get_asset_by_name(project_name, asset_name, fields=None): - assets = get_assets( - project_name, asset_names=[asset_name], fields=fields - ) - for asset in assets: - return asset - return None - - -def _folders_query(project_name, con, fields, **kwargs): - if fields is None or "tasks" in fields: - folders = get_folders_with_tasks( - con, project_name, fields=fields, **kwargs - ) - - else: - folders = con.get_folders(project_name, fields=fields, **kwargs) - - for folder in folders: - yield folder - - -def get_assets( - project_name, - asset_ids=None, - asset_names=None, - parent_ids=None, - archived=False, - fields=None -): - if not project_name: - return - - active = True - if archived: - active = None - - con = get_ayon_server_api_connection() - fields = folder_fields_v3_to_v4(fields, con) - kwargs = dict( - folder_ids=asset_ids, - parent_ids=parent_ids, - active=active, - ) - if not asset_names: - for folder in _folders_query(project_name, con, fields, **kwargs): - yield convert_v4_folder_to_v3(folder, project_name) - return - - new_asset_names = set() - folder_paths = set() - for name in asset_names: - if "/" in name: - folder_paths.add(name) - else: - new_asset_names.add(name) - - yielded_ids = set() - if folder_paths: - for folder in _folders_query( - project_name, con, fields, folder_paths=folder_paths, **kwargs - ): - yielded_ids.add(folder["id"]) - yield convert_v4_folder_to_v3(folder, project_name) - - if not new_asset_names: - return - - for folder in _folders_query( - project_name, con, fields, folder_names=new_asset_names, **kwargs - ): - if folder["id"] not in yielded_ids: - yielded_ids.add(folder["id"]) - yield convert_v4_folder_to_v3(folder, project_name) - - -def get_archived_assets( - project_name, - asset_ids=None, - asset_names=None, - parent_ids=None, - fields=None -): - return get_assets( - project_name, - asset_ids, - asset_names, - parent_ids, - True, - fields - ) - - -def get_asset_ids_with_subsets(project_name, asset_ids=None): - con = get_ayon_server_api_connection() - return con.get_folder_ids_with_products(project_name, asset_ids) - - -def get_subset_by_id(project_name, subset_id, fields=None): - subsets = get_subsets( - project_name, subset_ids=[subset_id], fields=fields - ) - for subset in subsets: - return subset - return None - - -def get_subset_by_name(project_name, subset_name, asset_id, fields=None): - subsets = get_subsets( - project_name, - subset_names=[subset_name], - asset_ids=[asset_id], - fields=fields - ) - for subset in subsets: - return subset - return None - - -def get_subsets( - project_name, - subset_ids=None, - subset_names=None, - asset_ids=None, - names_by_asset_ids=None, - archived=False, - fields=None -): - return _get_subsets( - project_name, - subset_ids, - subset_names, - asset_ids, - names_by_asset_ids, - archived, - fields=fields - ) - - -def get_subset_families(project_name, subset_ids=None): - con = get_ayon_server_api_connection() - return con.get_product_type_names(project_name, subset_ids) - - -def get_version_by_id(project_name, version_id, fields=None): - versions = get_versions( - project_name, - version_ids=[version_id], - fields=fields, - hero=True - ) - for version in versions: - return version - return None - - -def get_version_by_name(project_name, version, 
subset_id, fields=None): - versions = get_versions( - project_name, - subset_ids=[subset_id], - versions=[version], - fields=fields - ) - for version in versions: - return version - return None - - -def get_versions( - project_name, - version_ids=None, - subset_ids=None, - versions=None, - hero=False, - fields=None -): - return _get_versions( - project_name, - version_ids, - subset_ids, - versions, - hero=hero, - standard=True, - fields=fields - ) - - -def get_hero_version_by_id(project_name, version_id, fields=None): - versions = get_hero_versions( - project_name, - version_ids=[version_id], - fields=fields - ) - for version in versions: - return version - return None - - -def get_hero_version_by_subset_id( - project_name, subset_id, fields=None -): - versions = get_hero_versions( - project_name, - subset_ids=[subset_id], - fields=fields - ) - for version in versions: - return version - return None - - -def get_hero_versions( - project_name, subset_ids=None, version_ids=None, fields=None -): - return _get_versions( - project_name, - version_ids=version_ids, - subset_ids=subset_ids, - hero=True, - standard=False, - fields=fields - ) - - -def get_last_versions(project_name, subset_ids, active=None, fields=None): - if fields: - fields = set(fields) - fields.add("parent") - - versions = _get_versions( - project_name, - subset_ids=subset_ids, - latest=True, - hero=False, - active=active, - fields=fields - ) - return { - version["parent"]: version - for version in versions - } - - -def get_last_version_by_subset_id(project_name, subset_id, fields=None): - versions = _get_versions( - project_name, - subset_ids=[subset_id], - latest=True, - hero=False, - fields=fields - ) - if not versions: - return None - return versions[0] - - -def get_last_version_by_subset_name( - project_name, - subset_name, - asset_id=None, - asset_name=None, - fields=None -): - if not asset_id and not asset_name: - return None - - if not asset_id: - asset = get_asset_by_name( - project_name, asset_name, fields=["_id"] - ) - if not asset: - return None - asset_id = asset["_id"] - - subset = get_subset_by_name( - project_name, subset_name, asset_id, fields=["_id"] - ) - if not subset: - return None - return get_last_version_by_subset_id( - project_name, subset["_id"], fields=fields - ) - - -def get_output_link_versions(project_name, version_id, fields=None): - if not version_id: - return [] - - con = get_ayon_server_api_connection() - version_links = con.get_version_links( - project_name, version_id, link_direction="out") - - version_ids = { - link["entityId"] - for link in version_links - if link["entityType"] == "version" - } - if not version_ids: - return [] - - return get_versions(project_name, version_ids=version_ids, fields=fields) - - -def version_is_latest(project_name, version_id): - con = get_ayon_server_api_connection() - return con.version_is_latest(project_name, version_id) - - -def get_representation_by_id(project_name, representation_id, fields=None): - representations = get_representations( - project_name, - representation_ids=[representation_id], - fields=fields - ) - for representation in representations: - return representation - return None - - -def get_representation_by_name( - project_name, representation_name, version_id, fields=None -): - representations = get_representations( - project_name, - representation_names=[representation_name], - version_ids=[version_id], - fields=fields - ) - for representation in representations: - return representation - return None - - -def get_representations( - 
    project_name,
-    representation_ids=None,
-    representation_names=None,
-    version_ids=None,
-    context_filters=None,
-    names_by_version_ids=None,
-    archived=False,
-    standard=True,
-    fields=None
-):
-    if context_filters is not None:
-        # TODO should we add the support?
-        # - there was ability to filter using regex
-        raise ValueError("OP v4 can't filter by representation context.")
-
-    if not archived and not standard:
-        return
-
-    if archived and not standard:
-        active = False
-    elif not archived and standard:
-        active = True
-    else:
-        active = None
-
-    con = get_ayon_server_api_connection()
-    fields = representation_fields_v3_to_v4(fields, con)
-    if fields and active is not None:
-        fields.add("active")
-
-    representations = con.get_representations(
-        project_name,
-        representation_ids=representation_ids,
-        representation_names=representation_names,
-        version_ids=version_ids,
-        names_by_version_ids=names_by_version_ids,
-        active=active,
-        fields=fields
-    )
-    for representation in representations:
-        yield convert_v4_representation_to_v3(representation)
-
-
-def get_representation_parents(project_name, representation):
-    if not representation:
-        return None
-
-    repre_id = representation["_id"]
-    parents_by_repre_id = get_representations_parents(
-        project_name, [representation]
-    )
-    return parents_by_repre_id[repre_id]
-
-
-def get_representations_parents(project_name, representations):
-    repre_ids = {
-        repre["_id"]
-        for repre in representations
-    }
-    con = get_ayon_server_api_connection()
-    parents_by_repre_id = con.get_representations_parents(
-        project_name, repre_ids
-    )
-    folder_ids = set()
-    for parents in parents_by_repre_id.values():
-        folder_ids.add(parents[2]["id"])
-
-    tasks_by_folder_id = {}
-
-    new_parents = {}
-    for repre_id, parents in parents_by_repre_id.items():
-        version, subset, folder, project = parents
-        folder_tasks = tasks_by_folder_id.get(folder["id"]) or {}
-        folder["tasks"] = folder_tasks
-        new_parents[repre_id] = (
-            convert_v4_version_to_v3(version),
-            convert_v4_subset_to_v3(subset),
-            convert_v4_folder_to_v3(folder, project_name),
-            project
-        )
-    return new_parents
-
-
-def get_archived_representations(
-    project_name,
-    representation_ids=None,
-    representation_names=None,
-    version_ids=None,
-    context_filters=None,
-    names_by_version_ids=None,
-    fields=None
-):
-    return get_representations(
-        project_name,
-        representation_ids=representation_ids,
-        representation_names=representation_names,
-        version_ids=version_ids,
-        context_filters=context_filters,
-        names_by_version_ids=names_by_version_ids,
-        archived=True,
-        standard=False,
-        fields=fields
-    )
-
-
-def get_thumbnail(
-    project_name, thumbnail_id, entity_type, entity_id, fields=None
-):
-    """Receive thumbnail entity data.
-
-    Args:
-        project_name (str): Name of project where to look for queried
-            entities.
-        thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
-        entity_type (str): Type of entity for which the thumbnail should be
-            received.
-        entity_id (str): Id of entity for which the thumbnail should be
-            received.
-        fields (Iterable[str]): Fields that should be returned. All fields are
-            returned if 'None' is passed.
-
-    Returns:
-        None: If thumbnail with specified id was not found.
-        Dict: Thumbnail entity data which can be reduced to specified
-            'fields'.
- """ - - if not thumbnail_id or not entity_type or not entity_id: - return None - - if entity_type == "asset": - entity_type = "folder" - - elif entity_type == "hero_version": - entity_type = "version" - - return { - "_id": thumbnail_id, - "type": "thumbnail", - "schema": CURRENT_THUMBNAIL_SCHEMA, - "data": { - "entity_type": entity_type, - "entity_id": entity_id - } - } - - -def get_thumbnails(project_name, thumbnail_contexts, fields=None): - """Get thumbnail entities. - - Warning: - This function is not OpenPype compatible. There is none usage of this - function in codebase so there is nothing to convert. The previous - implementation cannot be AYON compatible without entity types. - """ - - thumbnail_items = set() - for thumbnail_context in thumbnail_contexts: - thumbnail_id, entity_type, entity_id = thumbnail_context - thumbnail_item = get_thumbnail( - project_name, thumbnail_id, entity_type, entity_id - ) - if thumbnail_item: - thumbnail_items.add(thumbnail_item) - return list(thumbnail_items) - - -def get_thumbnail_id_from_source(project_name, src_type, src_id): - """Receive thumbnail id from source entity. - - Args: - project_name (str): Name of project where to look for queried entities. - src_type (str): Type of source entity ('asset', 'version'). - src_id (Union[str, ObjectId]): Id of source entity. - - Returns: - ObjectId: Thumbnail id assigned to entity. - None: If Source entity does not have any thumbnail id assigned. - """ - - if not src_type or not src_id: - return None - - if src_type == "version": - version = get_version_by_id( - project_name, src_id, fields=["data.thumbnail_id"] - ) or {} - return version.get("data", {}).get("thumbnail_id") - - if src_type == "asset": - asset = get_asset_by_id( - project_name, src_id, fields=["data.thumbnail_id"] - ) or {} - return asset.get("data", {}).get("thumbnail_id") - - return None - - -def get_workfile_info( - project_name, asset_id, task_name, filename, fields=None -): - if not asset_id or not task_name or not filename: - return None - - con = get_ayon_server_api_connection() - task = con.get_task_by_name( - project_name, asset_id, task_name, fields=["id", "name", "folderId"] - ) - if not task: - return None - - fields = workfile_info_fields_v3_to_v4(fields) - - for workfile_info in con.get_workfiles_info( - project_name, task_ids=[task["id"]], fields=fields - ): - if workfile_info["name"] == filename: - return convert_v4_workfile_info_to_v3(workfile_info, task) - return None diff --git a/client/ayon_core/client/entity_links.py b/client/ayon_core/client/entity_links.py deleted file mode 100644 index 7fb9fbde6f..0000000000 --- a/client/ayon_core/client/entity_links.py +++ /dev/null @@ -1,157 +0,0 @@ -from .utils import get_ayon_server_api_connection -from .entities import get_assets, get_representation_by_id - - -def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None): - """Extract linked asset ids from asset document. - - One of asset document or asset id must be passed. - - Note: - Asset links now works only from asset to assets. - - Args: - project_name (str): Project where to look for asset. - asset_doc (dict): Asset document from DB. - asset_id (str): Asset id to find its document. - - Returns: - List[Union[ObjectId, str]]: Asset ids of input links. 
- """ - - output = [] - if not asset_doc and not asset_id: - return output - - if not asset_id: - asset_id = asset_doc["_id"] - - con = get_ayon_server_api_connection() - links = con.get_folder_links(project_name, asset_id, link_direction="in") - return [ - link["entityId"] - for link in links - if link["entityType"] == "folder" - ] - - -def get_linked_assets( - project_name, asset_doc=None, asset_id=None, fields=None -): - """Return linked assets based on passed asset document. - - One of asset document or asset id must be passed. - - Args: - project_name (str): Name of project where to look for queried entities. - asset_doc (Dict[str, Any]): Asset document from database. - asset_id (Union[ObjectId, str]): Asset id. Can be used instead of - asset document. - fields (Iterable[str]): Fields that should be returned. All fields are - returned if 'None' is passed. - - Returns: - List[Dict[str, Any]]: Asset documents of input links for passed - asset doc. - """ - - link_ids = get_linked_asset_ids(project_name, asset_doc, asset_id) - if not link_ids: - return [] - return list(get_assets(project_name, asset_ids=link_ids, fields=fields)) - - - -def get_linked_representation_id( - project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None -): - """Returns list of linked ids of particular type (if provided). - - One of representation document or representation id must be passed. - Note: - Representation links now works only from representation through version - back to representations. - - Todos: - Missing depth query. Not sure how it did find more representations in - depth, probably links to version? - - Args: - project_name (str): Name of project where look for links. - repre_doc (Dict[str, Any]): Representation document. - repre_id (Union[ObjectId, str]): Representation id. - link_type (str): Type of link (e.g. 'reference', ...). - max_depth (int): Limit recursion level. Default: 0 - - Returns: - List[ObjectId] Linked representation ids. - """ - - if repre_doc: - repre_id = repre_doc["_id"] - - if not repre_id and not repre_doc: - return [] - - version_id = None - if repre_doc: - version_id = repre_doc.get("parent") - - if not version_id: - repre_doc = get_representation_by_id( - project_name, repre_id, fields=["parent"] - ) - if repre_doc: - version_id = repre_doc["parent"] - - if not version_id: - return [] - - if max_depth is None or max_depth == 0: - max_depth = 1 - - link_types = None - if link_type: - link_types = [link_type] - - con = get_ayon_server_api_connection() - # Store already found version ids to avoid recursion, and also to store - # output -> Don't forget to remove 'version_id' at the end!!! 
-    linked_version_ids = {version_id}
-    # Each loop of depth will reset this variable
-    versions_to_check = {version_id}
-    for _ in range(max_depth):
-        if not versions_to_check:
-            break
-
-        versions_links = con.get_versions_links(
-            project_name,
-            versions_to_check,
-            link_types=link_types,
-            link_direction="out")
-
-        versions_to_check = set()
-        for links in versions_links.values():
-            for link in links:
-                # Care only about version links
-                if link["entityType"] != "version":
-                    continue
-                entity_id = link["entityId"]
-                # Skip already found linked version ids
-                if entity_id in linked_version_ids:
-                    continue
-                linked_version_ids.add(entity_id)
-                versions_to_check.add(entity_id)
-
-    linked_version_ids.remove(version_id)
-    if not linked_version_ids:
-        return []
-    con = get_ayon_server_api_connection()
-    representations = con.get_representations(
-        project_name,
-        version_ids=linked_version_ids,
-        fields=["id"])
-    return [
-        repre["id"]
-        for repre in representations
-    ]
diff --git a/client/ayon_core/client/notes.md b/client/ayon_core/client/notes.md
deleted file mode 100644
index 59743892eb..0000000000
--- a/client/ayon_core/client/notes.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# Client functionality
-## Reason
-Preparation for the OpenPype v4 server. The goal is to remove direct mongo calls from the code and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that code does not rely on pymongo-specific features.
-
-The current goal is not to build a universal database model that could easily be replaced with any other data source, but to get as close to that as possible. The current implementation of OpenPype is too tightly coupled to pymongo and its abilities, so we are trying to get closer with long-term changes that can be used even in the current state.
-
-## Queries
-Query functions don't use the full potential of mongo queries, such as very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If one is really necessary, a new function can be added, but only if it is reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with the available filters and a possible reduction of the returned keys for each entity.
-
-## Changes
-Changes are a little bit complicated. Mongo has many options for how an update can happen, which had to be reduced; it would also be complicated at this stage to validate created or updated values, so there is almost no automation. Changes can be made using the operations available in `~/client/operations.py`. Each operation requires a project name and an entity type, but may also require operation-specific data.
-
-### Create
-Create operations expect already prepared document data; for that, there are helper functions creating skeletal document structures (they do not fill all required data), and except for `_id` all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
-
-### Update
-Update operations require an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`), as sketched below.
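-
-For illustration, this flattening amounts to roughly the following (a minimal sketch; `flatten_dict` is not the actual helper used by the wrapper):
-
-```python
-def flatten_dict(values, parent=None):
-    """Flatten nested dictionaries into dot-joined keys.
-
-    >>> flatten_dict({"data": {"fps": 25}})
-    {'data.fps': 25}
-    """
-    output = {}
-    for key, value in values.items():
-        full_key = "{}.{}".format(parent, key) if parent else key
-        if isinstance(value, dict):
-            output.update(flatten_dict(value, full_key))
-        else:
-            output[full_key] = value
-    return output
-```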
-To simplify writing update dictionaries, helper functions are available that do this for you; their names follow the template `prepare_<entity_type>_update_data` and they work by comparing the previous document with the new document. If such a function is missing for a requested entity type, it is because we didn't need it yet and it requires implementation.
-
-### Delete
-Delete operations need an entity id. The entity will be deleted from mongo.
-
-
-## What (probably) won't be replaced
-Some parts of the code are still using direct mongo calls. In most cases these are very specific, module-specific calls, or their usage will completely change in the future.
-- Mongo calls that are not project specific (out of the `avalon` collection) will be removed or will have to use a different mechanism for storing the data. At this moment this is related to OpenPype settings and logs, ftrack server events and some other data.
-- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, thus their abstraction with a wrapper is irrelevant and would complicate production in v3.
-- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers are creating, updating or removing assets in v3, but in v4 they will create folders with a different structure. Wrapping the creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin, which creates the project structure.
-- Code parts that are marked as deprecated in v3 or will be deprecated in v4.
-  - integrate asset legacy publish plugin - already legacy, kept for safety
-  - integrate thumbnail - thumbnails will be stored in a different way in v4
-  - input links - links will be stored in a different way and will have a different linking mechanism. In v3, links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".
- -## Known missing replacements -- change subset group in loader tool -- integrate subset group -- query input links in openpype lib -- create project in openpype lib -- save/create workfile doc in openpype lib -- integrate hero version diff --git a/client/ayon_core/client/openpype_comp.py b/client/ayon_core/client/openpype_comp.py deleted file mode 100644 index 71a141e913..0000000000 --- a/client/ayon_core/client/openpype_comp.py +++ /dev/null @@ -1,159 +0,0 @@ -import collections -import json - -import six -from ayon_api.graphql import GraphQlQuery, FIELD_VALUE, fields_to_dict - -from .constants import DEFAULT_FOLDER_FIELDS - - -def folders_tasks_graphql_query(fields): - query = GraphQlQuery("FoldersQuery") - project_name_var = query.add_variable("projectName", "String!") - folder_ids_var = query.add_variable("folderIds", "[String!]") - parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]") - folder_paths_var = query.add_variable("folderPaths", "[String!]") - folder_names_var = query.add_variable("folderNames", "[String!]") - has_products_var = query.add_variable("folderHasProducts", "Boolean!") - - project_field = query.add_field("project") - project_field.set_filter("name", project_name_var) - - folders_field = project_field.add_field_with_edges("folders") - folders_field.set_filter("ids", folder_ids_var) - folders_field.set_filter("parentIds", parent_folder_ids_var) - folders_field.set_filter("names", folder_names_var) - folders_field.set_filter("paths", folder_paths_var) - folders_field.set_filter("hasProducts", has_products_var) - - fields = set(fields) - fields.discard("tasks") - tasks_field = folders_field.add_field_with_edges("tasks") - tasks_field.add_field("name") - tasks_field.add_field("taskType") - - nested_fields = fields_to_dict(fields) - - query_queue = collections.deque() - for key, value in nested_fields.items(): - query_queue.append((key, value, folders_field)) - - while query_queue: - item = query_queue.popleft() - key, value, parent = item - field = parent.add_field(key) - if value is FIELD_VALUE: - continue - - for k, v in value.items(): - query_queue.append((k, v, field)) - return query - - -def get_folders_with_tasks( - con, - project_name, - folder_ids=None, - folder_paths=None, - folder_names=None, - parent_ids=None, - active=True, - fields=None -): - """Query folders with tasks from server. - - This is for v4 compatibility where tasks were stored on assets. This is - an inefficient way how folders and tasks are queried so it was added only - as compatibility function. - - Todos: - Folder name won't be unique identifier, so we should add folder path - filtering. - - Notes: - Filter 'active' don't have direct filter in GraphQl. - - Args: - con (ServerAPI): Connection to server. - project_name (str): Name of project where folders are. - folder_ids (Iterable[str]): Folder ids to filter. - folder_paths (Iterable[str]): Folder paths used for filtering. - folder_names (Iterable[str]): Folder names used for filtering. - parent_ids (Iterable[str]): Ids of folder parents. Use 'None' - if folder is direct child of project. - active (Union[bool, None]): Filter active/inactive folders. Both - are returned if is set to None. - fields (Union[Iterable(str), None]): Fields to be queried - for folder. All possible folder fields are returned if 'None' - is passed. - - Yields: - Dict[str, Any]: Queried folder entities. 
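-
-    Example:
-        >>> # Illustrative only; the connection comes from
-        >>> # 'get_ayon_server_api_connection' and the names are hypothetical.
-        >>> folders = get_folders_with_tasks(
-        ...     con, "demo_project", folder_paths=["/shots/sq01_sh010"]
-        ... )
-        >>> for folder in folders:
-        ...     print(folder["name"], [t["name"] for t in folder["tasks"]])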
- """ - - if not project_name: - return - - filters = { - "projectName": project_name - } - if folder_ids is not None: - folder_ids = set(folder_ids) - if not folder_ids: - return - filters["folderIds"] = list(folder_ids) - - if folder_paths is not None: - folder_paths = set(folder_paths) - if not folder_paths: - return - filters["folderPaths"] = list(folder_paths) - - if folder_names is not None: - folder_names = set(folder_names) - if not folder_names: - return - filters["folderNames"] = list(folder_names) - - if parent_ids is not None: - parent_ids = set(parent_ids) - if not parent_ids: - return - if None in parent_ids: - # Replace 'None' with '"root"' which is used during GraphQl - # query for parent ids filter for folders without folder - # parent - parent_ids.remove(None) - parent_ids.add("root") - - if project_name in parent_ids: - # Replace project name with '"root"' which is used during - # GraphQl query for parent ids filter for folders without - # folder parent - parent_ids.remove(project_name) - parent_ids.add("root") - - filters["parentFolderIds"] = list(parent_ids) - - if fields: - fields = set(fields) - else: - fields = con.get_default_fields_for_type("folder") - fields |= DEFAULT_FOLDER_FIELDS - - if active is not None: - fields.add("active") - - query = folders_tasks_graphql_query(fields) - for attr, filter_value in filters.items(): - query.set_variable_value(attr, filter_value) - - parsed_data = query.query(con) - folders = parsed_data["project"]["folders"] - for folder in folders: - if active is not None and folder["active"] is not active: - continue - folder_data = folder.get("data") - if isinstance(folder_data, six.string_types): - folder["data"] = json.loads(folder_data) - yield folder diff --git a/client/ayon_core/client/operations.py b/client/ayon_core/client/operations.py deleted file mode 100644 index 71b3ca226a..0000000000 --- a/client/ayon_core/client/operations.py +++ /dev/null @@ -1,880 +0,0 @@ -import copy -import json -import collections -import uuid -import datetime - -from ayon_api.server_api import ( - PROJECT_NAME_ALLOWED_SYMBOLS, - PROJECT_NAME_REGEX, -) - -from .constants import ( - CURRENT_PROJECT_SCHEMA, - CURRENT_PROJECT_CONFIG_SCHEMA, - CURRENT_ASSET_DOC_SCHEMA, - CURRENT_SUBSET_SCHEMA, - CURRENT_VERSION_SCHEMA, - CURRENT_HERO_VERSION_SCHEMA, - CURRENT_REPRESENTATION_SCHEMA, - CURRENT_WORKFILE_INFO_SCHEMA, - CURRENT_THUMBNAIL_SCHEMA, -) -from .operations_base import ( - REMOVED_VALUE, - CreateOperation, - UpdateOperation, - DeleteOperation, - BaseOperationsSession -) -from .conversion_utils import ( - convert_create_asset_to_v4, - convert_create_task_to_v4, - convert_create_subset_to_v4, - convert_create_version_to_v4, - convert_create_hero_version_to_v4, - convert_create_representation_to_v4, - convert_create_workfile_info_to_v4, - - convert_update_folder_to_v4, - convert_update_subset_to_v4, - convert_update_version_to_v4, - convert_update_hero_version_to_v4, - convert_update_representation_to_v4, - convert_update_workfile_info_to_v4, -) -from .utils import create_entity_id, get_ayon_server_api_connection - - -def _create_or_convert_to_id(entity_id=None): - if entity_id is None: - return create_entity_id() - - # Validate if can be converted to uuid - uuid.UUID(entity_id) - return entity_id - - -def new_project_document( - project_name, project_code, config, data=None, entity_id=None -): - """Create skeleton data of project document. - - Args: - project_name (str): Name of project. Used as identifier of a project. 
        project_code (str): Shorter version of project name without spaces
            and special characters (in most cases). Should also be
            considered a unique name across projects.
-        config (Dict[str, Any]): Project config consisting of roots,
-            templates, applications and other project Anatomy related data.
-        data (Dict[str, Any]): Project data with information about its
-            attributes (e.g. 'fps' etc.) or integration specific keys.
-        entity_id (Union[str, ObjectId]): Predefined id of document. New id
-            is created if not passed.
-
-    Returns:
-        Dict[str, Any]: Skeleton of project document.
-    """
-
-    if data is None:
-        data = {}
-
-    data["code"] = project_code
-
-    return {
-        "_id": _create_or_convert_to_id(entity_id),
-        "name": project_name,
-        "type": CURRENT_PROJECT_SCHEMA,
-        "entity_data": data,
-        "config": config
-    }
-
-
-def new_asset_document(
-    name, project_id, parent_id, parents, data=None, entity_id=None
-):
-    """Create skeleton data of asset document.
-
-    Args:
-        name (str): Is considered as unique identifier of asset in project.
-        project_id (Union[str, ObjectId]): Id of project document.
-        parent_id (Union[str, ObjectId]): Id of parent asset.
-        parents (List[str]): List of parent asset names.
-        data (Dict[str, Any]): Asset document data. Empty dictionary is used
-            if not passed. Value of 'parent_id' is used to fill
-            'visualParent'.
-        entity_id (Union[str, ObjectId]): Predefined id of document. New id
-            is created if not passed.
-
-    Returns:
-        Dict[str, Any]: Skeleton of asset document.
-    """
-
-    if data is None:
-        data = {}
-    if parent_id is not None:
-        parent_id = _create_or_convert_to_id(parent_id)
-    data["visualParent"] = parent_id
-    data["parents"] = parents
-
-    return {
-        "_id": _create_or_convert_to_id(entity_id),
-        "type": "asset",
-        "name": name,
-        # This will be ignored
-        "parent": project_id,
-        "data": data,
-        "schema": CURRENT_ASSET_DOC_SCHEMA
-    }
-
-
-def new_subset_document(name, family, asset_id, data=None, entity_id=None):
-    """Create skeleton data of subset document.
-
-    Args:
-        name (str): Is considered as unique identifier of subset under asset.
-        family (str): Subset's family.
-        asset_id (Union[str, ObjectId]): Id of parent asset.
-        data (Dict[str, Any]): Subset document data. Empty dictionary is used
-            if not passed. Value of 'family' is used to fill 'family'.
-        entity_id (Union[str, ObjectId]): Predefined id of document. New id
-            is created if not passed.
-
-    Returns:
-        Dict[str, Any]: Skeleton of subset document.
-    """
-
-    if data is None:
-        data = {}
-    data["family"] = family
-    return {
-        "_id": _create_or_convert_to_id(entity_id),
-        "schema": CURRENT_SUBSET_SCHEMA,
-        "type": "subset",
-        "name": name,
-        "data": data,
-        "parent": _create_or_convert_to_id(asset_id)
-    }
-
-
-def new_version_doc(version, subset_id, data=None, entity_id=None):
-    """Create skeleton data of version document.
-
-    Args:
-        version (int): Is considered as unique identifier of version
-            under subset.
-        subset_id (Union[str, ObjectId]): Id of parent subset.
-        data (Dict[str, Any]): Version document data.
-        entity_id (Union[str, ObjectId]): Predefined id of document. New id
-            is created if not passed.
-
-    Returns:
-        Dict[str, Any]: Skeleton of version document.
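-
-    Example:
-        >>> # Illustrative only; the subset id is a hypothetical uuid hex.
-        >>> version_doc = new_version_doc(3, "9c1f0a2b4d5e46f8a1b2c3d4e5f60718")
-        >>> version_doc["name"]
-        3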
- """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_id(entity_id), - "schema": CURRENT_VERSION_SCHEMA, - "type": "version", - "name": int(version), - "parent": _create_or_convert_to_id(subset_id), - "data": data - } - - -def new_hero_version_doc(subset_id, data, version=None, entity_id=None): - """Create skeleton data of hero version document. - - Args: - subset_id (Union[str, ObjectId]): Id of parent subset. - data (Dict[str, Any]): Version document data. - version (int): Version of source version. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of version document. - """ - - if version is None: - version = -1 - elif version > 0: - version = -version - - return { - "_id": _create_or_convert_to_id(entity_id), - "schema": CURRENT_HERO_VERSION_SCHEMA, - "type": "hero_version", - "version": version, - "parent": _create_or_convert_to_id(subset_id), - "data": data - } - - -def new_representation_doc( - name, version_id, context, data=None, entity_id=None -): - """Create skeleton data of representation document. - - Args: - name (str): Representation name considered as unique identifier - of representation under version. - version_id (Union[str, ObjectId]): Id of parent version. - context (Dict[str, Any]): Representation context used for fill template - of to query. - data (Dict[str, Any]): Representation document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of version document. - """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_id(entity_id), - "schema": CURRENT_REPRESENTATION_SCHEMA, - "type": "representation", - "parent": _create_or_convert_to_id(version_id), - "name": name, - "data": data, - - # Imprint shortcut to context for performance reasons. - "context": context - } - - -def new_thumbnail_doc(data=None, entity_id=None): - """Create skeleton data of thumbnail document. - - Args: - data (Dict[str, Any]): Thumbnail document data. - entity_id (Union[str, ObjectId]): Predefined id of document. New id is - created if not passed. - - Returns: - Dict[str, Any]: Skeleton of thumbnail document. - """ - - if data is None: - data = {} - - return { - "_id": _create_or_convert_to_id(entity_id), - "type": "thumbnail", - "schema": CURRENT_THUMBNAIL_SCHEMA, - "data": data - } - - -def new_workfile_info_doc( - filename, asset_id, task_name, files, data=None, entity_id=None -): - """Create skeleton data of workfile info document. - - Workfile document is at this moment used primarily for artist notes. - - Args: - filename (str): Filename of workfile. - asset_id (Union[str, ObjectId]): Id of asset under which workfile live. - task_name (str): Task under which was workfile created. - files (List[str]): List of rootless filepaths related to workfile. - data (Dict[str, Any]): Additional metadata. - - Returns: - Dict[str, Any]: Skeleton of workfile info document. 
- """ - - if not data: - data = {} - - return { - "_id": _create_or_convert_to_id(entity_id), - "type": "workfile", - "parent": _create_or_convert_to_id(asset_id), - "task_name": task_name, - "filename": filename, - "data": data, - "files": files - } - - -def _prepare_update_data(old_doc, new_doc, replace): - changes = {} - for key, value in new_doc.items(): - if key not in old_doc or value != old_doc[key]: - changes[key] = value - - if replace: - for key in old_doc.keys(): - if key not in new_doc: - changes[key] = REMOVED_VALUE - return changes - - -def prepare_subset_update_data(old_doc, new_doc, replace=True): - """Compare two subset documents and prepare update data. - - Based on compared values will create update data for - 'MongoUpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_version_update_data(old_doc, new_doc, replace=True): - """Compare two version documents and prepare update data. - - Based on compared values will create update data for - 'MongoUpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -def prepare_hero_version_update_data(old_doc, new_doc, replace=True): - """Compare two hero version documents and prepare update data. - - Based on compared values will create update data for 'UpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - changes = _prepare_update_data(old_doc, new_doc, replace) - changes.pop("version_id", None) - return changes - - -def prepare_representation_update_data(old_doc, new_doc, replace=True): - """Compare two representation documents and prepare update data. - - Based on compared values will create update data for - 'MongoUpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - changes = _prepare_update_data(old_doc, new_doc, replace) - context = changes.get("data", {}).get("context") - # Make sure that both 'family' and 'subset' are in changes if - # one of them changed (they'll both become 'product'). - if ( - context - and ("family" in context or "subset" in context) - ): - context["family"] = new_doc["data"]["context"]["family"] - context["subset"] = new_doc["data"]["context"]["subset"] - - return changes - - -def prepare_workfile_info_update_data(old_doc, new_doc, replace=True): - """Compare two workfile info documents and prepare update data. - - Based on compared values will create update data for - 'MongoUpdateOperation'. - - Empty output means that documents are identical. - - Returns: - Dict[str, Any]: Changes between old and new document. - """ - - return _prepare_update_data(old_doc, new_doc, replace) - - -class FailedOperations(Exception): - pass - - -def entity_data_json_default(value): - if isinstance(value, datetime.datetime): - return int(value.timestamp()) - - raise TypeError( - "Object of type {} is not JSON serializable".format(str(type(value))) - ) - - -def failed_json_default(value): - return "< Failed value {} > {}".format(type(value), str(value)) - - -class ServerCreateOperation(CreateOperation): - """Operation to create an entity. - - Args: - project_name (str): On which project operation will happen. 
- entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - data (Dict[str, Any]): Data of entity that will be created. - """ - - def __init__(self, project_name, entity_type, data, session): - self._session = session - - if not data: - data = {} - data = copy.deepcopy(data) - if entity_type == "project": - raise ValueError("Project cannot be created using operations") - - tasks = None - if entity_type in "asset": - # TODO handle tasks - entity_type = "folder" - if "data" in data: - tasks = data["data"].get("tasks") - - project = self._session.get_project(project_name) - new_data = convert_create_asset_to_v4(data, project, self.con) - - elif entity_type == "task": - project = self._session.get_project(project_name) - new_data = convert_create_task_to_v4(data, project, self.con) - - elif entity_type == "subset": - new_data = convert_create_subset_to_v4(data, self.con) - entity_type = "product" - - elif entity_type == "version": - new_data = convert_create_version_to_v4(data, self.con) - - elif entity_type == "hero_version": - new_data = convert_create_hero_version_to_v4( - data, project_name, self.con - ) - entity_type = "version" - - elif entity_type in ("representation", "archived_representation"): - new_data = convert_create_representation_to_v4(data, self.con) - entity_type = "representation" - - elif entity_type == "workfile": - new_data = convert_create_workfile_info_to_v4( - data, project_name, self.con - ) - - else: - raise ValueError( - "Unhandled entity type \"{}\"".format(entity_type) - ) - - # Simple check if data can be dumped into json - # - should raise error on 'ObjectId' object - try: - new_data = json.loads( - json.dumps(new_data, default=entity_data_json_default) - ) - - except: - raise ValueError("Couldn't json parse body: {}".format( - json.dumps(new_data, default=failed_json_default) - )) - - super(ServerCreateOperation, self).__init__( - project_name, entity_type, new_data - ) - - if "id" not in self._data: - self._data["id"] = create_entity_id() - - if tasks: - copied_tasks = copy.deepcopy(tasks) - for task_name, task in copied_tasks.items(): - task["name"] = task_name - task["folderId"] = self._data["id"] - self.session.create_entity( - project_name, "task", task, nested_id=self.id - ) - - @property - def con(self): - return self.session.con - - @property - def session(self): - return self._session - - @property - def entity_id(self): - return self._data["id"] - - def to_server_operation(self): - return { - "id": self.id, - "type": "create", - "entityType": self.entity_type, - "entityId": self.entity_id, - "data": self._data - } - - -class ServerUpdateOperation(UpdateOperation): - """Operation to update an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - entity_id (Union[str, ObjectId]): Identifier of an entity. - update_data (Dict[str, Any]): Key -> value changes that will be set in - database. If value is set to 'REMOVED_VALUE' the key will be - removed. Only first level of dictionary is checked (on purpose). 
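-
-    Example:
-        >>> # Illustrative only; sets 'data.fps' and removes 'data.source'.
-        >>> update_data = {
-        ...     "data.fps": 25,
-        ...     "data.source": REMOVED_VALUE,
-        ... }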
- """ - - def __init__( - self, project_name, entity_type, entity_id, update_data, session - ): - self._session = session - - update_data = copy.deepcopy(update_data) - if entity_type == "project": - raise ValueError("Project cannot be created using operations") - - if entity_type in ("asset", "archived_asset"): - new_update_data = convert_update_folder_to_v4( - project_name, entity_id, update_data, self.con - ) - entity_type = "folder" - - elif entity_type == "subset": - new_update_data = convert_update_subset_to_v4( - project_name, entity_id, update_data, self.con - ) - entity_type = "product" - - elif entity_type == "version": - new_update_data = convert_update_version_to_v4( - project_name, entity_id, update_data, self.con - ) - - elif entity_type == "hero_version": - new_update_data = convert_update_hero_version_to_v4( - project_name, entity_id, update_data, self.con - ) - entity_type = "version" - - elif entity_type in ("representation", "archived_representation"): - new_update_data = convert_update_representation_to_v4( - project_name, entity_id, update_data, self.con - ) - entity_type = "representation" - - elif entity_type == "workfile": - new_update_data = convert_update_workfile_info_to_v4( - project_name, entity_id, update_data, self.con - ) - - else: - raise ValueError( - "Unhandled entity type \"{}\"".format(entity_type) - ) - - try: - new_update_data = json.loads( - json.dumps(new_update_data, default=entity_data_json_default) - ) - - except: - raise ValueError("Couldn't json parse body: {}".format( - json.dumps(new_update_data, default=failed_json_default) - )) - - super(ServerUpdateOperation, self).__init__( - project_name, entity_type, entity_id, new_update_data - ) - - @property - def con(self): - return self.session.con - - @property - def session(self): - return self._session - - def to_server_operation(self): - if not self._update_data: - return None - - update_data = {} - for key, value in self._update_data.items(): - if value is REMOVED_VALUE: - value = None - update_data[key] = value - - return { - "id": self.id, - "type": "update", - "entityType": self.entity_type, - "entityId": self.entity_id, - "data": update_data - } - - -class ServerDeleteOperation(DeleteOperation): - """Operation to delete an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - entity_id (Union[str, ObjectId]): Entity id that will be removed. 
- """ - - def __init__(self, project_name, entity_type, entity_id, session): - self._session = session - - if entity_type == "asset": - entity_type = "folder" - - elif entity_type == "hero_version": - entity_type = "version" - - elif entity_type == "subset": - entity_type = "product" - - super(ServerDeleteOperation, self).__init__( - project_name, entity_type, entity_id - ) - - @property - def con(self): - return self.session.con - - @property - def session(self): - return self._session - - def to_server_operation(self): - return { - "id": self.id, - "type": self.operation_name, - "entityId": self.entity_id, - "entityType": self.entity_type, - } - - -class OperationsSession(BaseOperationsSession): - def __init__(self, con=None, *args, **kwargs): - super(OperationsSession, self).__init__(*args, **kwargs) - if con is None: - con = get_ayon_server_api_connection() - self._con = con - self._project_cache = {} - self._nested_operations = collections.defaultdict(list) - - @property - def con(self): - return self._con - - def get_project(self, project_name): - if project_name not in self._project_cache: - self._project_cache[project_name] = self.con.get_project( - project_name) - return copy.deepcopy(self._project_cache[project_name]) - - def commit(self): - """Commit session operations.""" - - operations, self._operations = self._operations, [] - if not operations: - return - - operations_by_project = collections.defaultdict(list) - for operation in operations: - operations_by_project[operation.project_name].append(operation) - - body_by_id = {} - results = [] - for project_name, operations in operations_by_project.items(): - operations_body = [] - for operation in operations: - body = operation.to_server_operation() - if body is not None: - try: - json.dumps(body) - except: - raise ValueError("Couldn't json parse body: {}".format( - json.dumps( - body, indent=4, default=failed_json_default - ) - )) - - body_by_id[operation.id] = body - operations_body.append(body) - - if operations_body: - result = self._con.post( - "projects/{}/operations".format(project_name), - operations=operations_body, - canFail=False - ) - results.append(result.data) - - for result in results: - if result.get("success"): - continue - - if "operations" not in result: - raise FailedOperations( - "Operation failed. Content: {}".format(str(result)) - ) - - for op_result in result["operations"]: - if not op_result["success"]: - operation_id = op_result["id"] - raise FailedOperations(( - "Operation \"{}\" failed with data:\n{}\nError: {}." - ).format( - operation_id, - json.dumps(body_by_id[operation_id], indent=4), - op_result.get("error", "unknown"), - )) - - def create_entity(self, project_name, entity_type, data, nested_id=None): - """Fast access to 'ServerCreateOperation'. - - Args: - project_name (str): On which project the creation happens. - entity_type (str): Which entity type will be created. - data (Dicst[str, Any]): Entity data. - nested_id (str): Id of other operation from which is triggered - operation -> Operations can trigger suboperations but they - must be added to operations list after it's parent is added. - - Returns: - ServerCreateOperation: Object of update operation. 
- """ - - operation = ServerCreateOperation( - project_name, entity_type, data, self - ) - - if nested_id: - self._nested_operations[nested_id].append(operation) - else: - self.add(operation) - if operation.id in self._nested_operations: - self.extend(self._nested_operations.pop(operation.id)) - - return operation - - def update_entity( - self, project_name, entity_type, entity_id, update_data, nested_id=None - ): - """Fast access to 'ServerUpdateOperation'. - - Returns: - ServerUpdateOperation: Object of update operation. - """ - - operation = ServerUpdateOperation( - project_name, entity_type, entity_id, update_data, self - ) - if nested_id: - self._nested_operations[nested_id].append(operation) - else: - self.add(operation) - if operation.id in self._nested_operations: - self.extend(self._nested_operations.pop(operation.id)) - return operation - - def delete_entity( - self, project_name, entity_type, entity_id, nested_id=None - ): - """Fast access to 'ServerDeleteOperation'. - - Returns: - ServerDeleteOperation: Object of delete operation. - """ - - operation = ServerDeleteOperation( - project_name, entity_type, entity_id, self - ) - if nested_id: - self._nested_operations[nested_id].append(operation) - else: - self.add(operation) - if operation.id in self._nested_operations: - self.extend(self._nested_operations.pop(operation.id)) - return operation - - -def create_project( - project_name, - project_code, - library_project=False, - preset_name=None, - con=None -): - """Create project using OpenPype settings. - - This project creation function is not validating project document on - creation. It is because project document is created blindly with only - minimum required information about project which is it's name, code, type - and schema. - - Entered project name must be unique and project must not exist yet. - - Note: - This function is here to be OP v4 ready but in v3 has more logic - to do. That's why inner imports are in the body. - - Args: - project_name (str): New project name. Should be unique. - project_code (str): Project's code should be unique too. - library_project (bool): Project is library project. - preset_name (str): Name of anatomy preset. Default is used if not - passed. - con (ServerAPI): Connection to server with logged user. - - Raises: - ValueError: When project name already exists in MongoDB. - - Returns: - dict: Created project document. - """ - - if con is None: - con = get_ayon_server_api_connection() - - return con.create_project( - project_name, - project_code, - library_project, - preset_name - ) - - -def delete_project(project_name, con=None): - if con is None: - con = get_ayon_server_api_connection() - - return con.delete_project(project_name) - - -def create_thumbnail(project_name, src_filepath, thumbnail_id=None, con=None): - if con is None: - con = get_ayon_server_api_connection() - return con.create_thumbnail(project_name, src_filepath, thumbnail_id) diff --git a/client/ayon_core/client/operations_base.py b/client/ayon_core/client/operations_base.py deleted file mode 100644 index 887b237b1c..0000000000 --- a/client/ayon_core/client/operations_base.py +++ /dev/null @@ -1,289 +0,0 @@ -import uuid -import copy -from abc import ABCMeta, abstractmethod, abstractproperty -import six - -REMOVED_VALUE = object() - - -@six.add_metaclass(ABCMeta) -class AbstractOperation(object): - """Base operation class. - - Operation represent a call into database. The call can create, change or - remove data. 
- - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - """ - - def __init__(self, project_name, entity_type): - self._project_name = project_name - self._entity_type = entity_type - self._id = str(uuid.uuid4()) - - @property - def project_name(self): - return self._project_name - - @property - def id(self): - """Identifier of operation.""" - - return self._id - - @property - def entity_type(self): - return self._entity_type - - @abstractproperty - def operation_name(self): - """Stringified type of operation.""" - - pass - - def to_data(self): - """Convert operation to data that can be converted to json or others. - - Warning: - Current state returns ObjectId objects which cannot be parsed by - json. - - Returns: - Dict[str, Any]: Description of operation. - """ - - return { - "id": self._id, - "entity_type": self.entity_type, - "project_name": self.project_name, - "operation": self.operation_name - } - - -class CreateOperation(AbstractOperation): - """Operation to create an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - data (Dict[str, Any]): Data of entity that will be created. - """ - - operation_name = "create" - - def __init__(self, project_name, entity_type, data): - super(CreateOperation, self).__init__(project_name, entity_type) - - if not data: - data = {} - else: - data = copy.deepcopy(dict(data)) - self._data = data - - def __setitem__(self, key, value): - self.set_value(key, value) - - def __getitem__(self, key): - return self.data[key] - - def set_value(self, key, value): - self.data[key] = value - - def get(self, key, *args, **kwargs): - return self.data.get(key, *args, **kwargs) - - @abstractproperty - def entity_id(self): - pass - - @property - def data(self): - return self._data - - def to_data(self): - output = super(CreateOperation, self).to_data() - output["data"] = copy.deepcopy(self.data) - return output - - -class UpdateOperation(AbstractOperation): - """Operation to update an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. - entity_id (Union[str, ObjectId]): Identifier of an entity. - update_data (Dict[str, Any]): Key -> value changes that will be set in - database. If value is set to 'REMOVED_VALUE' the key will be - removed. Only first level of dictionary is checked (on purpose). - """ - - operation_name = "update" - - def __init__(self, project_name, entity_type, entity_id, update_data): - super(UpdateOperation, self).__init__(project_name, entity_type) - - self._entity_id = entity_id - self._update_data = update_data - - @property - def entity_id(self): - return self._entity_id - - @property - def update_data(self): - return self._update_data - - def to_data(self): - changes = {} - for key, value in self._update_data.items(): - if value is REMOVED_VALUE: - value = None - changes[key] = value - - output = super(UpdateOperation, self).to_data() - output.update({ - "entity_id": self.entity_id, - "changes": changes - }) - return output - - -class DeleteOperation(AbstractOperation): - """Operation to delete an entity. - - Args: - project_name (str): On which project operation will happen. - entity_type (str): Type of entity on which change happens. - e.g. 'asset', 'representation' etc. 
- entity_id (Union[str, ObjectId]): Entity id that will be removed. - """ - - operation_name = "delete" - - def __init__(self, project_name, entity_type, entity_id): - super(DeleteOperation, self).__init__(project_name, entity_type) - - self._entity_id = entity_id - - @property - def entity_id(self): - return self._entity_id - - def to_data(self): - output = super(DeleteOperation, self).to_data() - output["entity_id"] = self.entity_id - return output - - -class BaseOperationsSession(object): - """Session storing operations that should happen in an order. - - At this moment does not handle anything special can be considered as - stupid list of operations that will happen after each other. If creation - of same entity is there multiple times it's handled in any way and document - values are not validated. - """ - - def __init__(self): - self._operations = [] - - def __len__(self): - return len(self._operations) - - def add(self, operation): - """Add operation to be processed. - - Args: - operation (BaseOperation): Operation that should be processed. - """ - if not isinstance( - operation, - (CreateOperation, UpdateOperation, DeleteOperation) - ): - raise TypeError("Expected Operation object got {}".format( - str(type(operation)) - )) - - self._operations.append(operation) - - def append(self, operation): - """Add operation to be processed. - - Args: - operation (BaseOperation): Operation that should be processed. - """ - - self.add(operation) - - def extend(self, operations): - """Add operations to be processed. - - Args: - operations (List[BaseOperation]): Operations that should be - processed. - """ - - for operation in operations: - self.add(operation) - - def remove(self, operation): - """Remove operation.""" - - self._operations.remove(operation) - - def clear(self): - """Clear all registered operations.""" - - self._operations = [] - - def to_data(self): - return [ - operation.to_data() - for operation in self._operations - ] - - @abstractmethod - def commit(self): - """Commit session operations.""" - pass - - def create_entity(self, project_name, entity_type, data): - """Fast access to 'CreateOperation'. - - Returns: - CreateOperation: Object of update operation. - """ - - operation = CreateOperation(project_name, entity_type, data) - self.add(operation) - return operation - - def update_entity(self, project_name, entity_type, entity_id, update_data): - """Fast access to 'UpdateOperation'. - - Returns: - UpdateOperation: Object of update operation. - """ - - operation = UpdateOperation( - project_name, entity_type, entity_id, update_data - ) - self.add(operation) - return operation - - def delete_entity(self, project_name, entity_type, entity_id): - """Fast access to 'DeleteOperation'. - - Returns: - DeleteOperation: Object of delete operation. 
- """ - - operation = DeleteOperation(project_name, entity_type, entity_id) - self.add(operation) - return operation diff --git a/client/ayon_core/client/utils.py b/client/ayon_core/client/utils.py deleted file mode 100644 index 26da6e34e1..0000000000 --- a/client/ayon_core/client/utils.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -import uuid - -import ayon_api - -from ayon_core.client.operations_base import REMOVED_VALUE - - -class _GlobalCache: - initialized = False - - -def get_ayon_server_api_connection(): - if _GlobalCache.initialized: - con = ayon_api.get_server_api_connection() - else: - from ayon_core.lib.local_settings import get_local_site_id - - _GlobalCache.initialized = True - site_id = get_local_site_id() - version = os.getenv("AYON_VERSION") - if ayon_api.is_connection_created(): - con = ayon_api.get_server_api_connection() - con.set_site_id(site_id) - con.set_client_version(version) - else: - con = ayon_api.create_connection(site_id, version) - return con - - -def create_entity_id(): - return uuid.uuid1().hex - - -def prepare_attribute_changes(old_entity, new_entity, replace=False): - """Prepare changes of attributes on entities. - - Compare 'attrib' of old and new entity data to prepare only changed - values that should be sent to server for update. - - Example: - >>> # Limited entity data to 'attrib' - >>> old_entity = { - ... "attrib": {"attr_1": 1, "attr_2": "MyString", "attr_3": True} - ... } - >>> new_entity = { - ... "attrib": {"attr_1": 2, "attr_3": True, "attr_4": 3} - ... } - >>> # Changes if replacement should not happen - >>> expected_changes = { - ... "attr_1": 2, - ... "attr_4": 3 - ... } - >>> changes = prepare_attribute_changes(old_entity, new_entity) - >>> changes == expected_changes - True - - >>> # Changes if replacement should happen - >>> expected_changes_replace = { - ... "attr_1": 2, - ... "attr_2": REMOVED_VALUE, - ... "attr_4": 3 - ... } - >>> changes_replace = prepare_attribute_changes( - ... old_entity, new_entity, True) - >>> changes_replace == expected_changes_replace - True - - Args: - old_entity (dict[str, Any]): Data of entity queried from server. - new_entity (dict[str, Any]): Entity data with applied changes. - replace (bool): New entity should fully replace all old entity values. - - Returns: - Dict[str, Any]: Values from new entity only if value has changed. - """ - - attrib_changes = {} - new_attrib = new_entity.get("attrib") - old_attrib = old_entity.get("attrib") - if new_attrib is None: - if not replace: - return attrib_changes - new_attrib = {} - - if old_attrib is None: - return new_attrib - - for attr, new_attr_value in new_attrib.items(): - old_attr_value = old_attrib.get(attr) - if old_attr_value != new_attr_value: - attrib_changes[attr] = new_attr_value - - if replace: - for attr in old_attrib: - if attr not in new_attrib: - attrib_changes[attr] = REMOVED_VALUE - - return attrib_changes - - -def prepare_entity_changes(old_entity, new_entity, replace=False): - """Prepare changes of AYON entities. - - Compare old and new entity to filter values from new data that changed. - - Args: - old_entity (dict[str, Any]): Data of entity queried from server. - new_entity (dict[str, Any]): Entity data with applied changes. - replace (bool): All attributes should be replaced by new values. So - all attribute values that are not on new entity will be removed. - - Returns: - Dict[str, Any]: Only values from new entity that changed. 
- """ - - changes = {} - for key, new_value in new_entity.items(): - if key == "attrib": - continue - - old_value = old_entity.get(key) - if old_value != new_value: - changes[key] = new_value - - if replace: - for key in old_entity: - if key not in new_entity: - changes[key] = REMOVED_VALUE - - attr_changes = prepare_attribute_changes(old_entity, new_entity, replace) - if attr_changes: - changes["attrib"] = attr_changes - return changes diff --git a/client/ayon_core/hooks/pre_add_last_workfile_arg.py b/client/ayon_core/hooks/pre_add_last_workfile_arg.py index d11bb106d6..74964e0df9 100644 --- a/client/ayon_core/hooks/pre_add_last_workfile_arg.py +++ b/client/ayon_core/hooks/pre_add_last_workfile_arg.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddLastWorkfileToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_copy_template_workfile.py b/client/ayon_core/hooks/pre_copy_template_workfile.py index df2a0386b2..c884116578 100644 --- a/client/ayon_core/hooks/pre_copy_template_workfile.py +++ b/client/ayon_core/hooks/pre_copy_template_workfile.py @@ -1,7 +1,7 @@ import os import shutil from ayon_core.settings import get_project_settings -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import ( get_custom_workfile_template, get_custom_workfile_template_by_string_context @@ -54,21 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook): self.log.info("Last workfile does not exist.") project_name = self.data["project_name"] - asset_name = self.data["folder_path"] + folder_path = self.data["folder_path"] task_name = self.data["task_name"] host_name = self.application.host_name project_settings = get_project_settings(project_name) - project_doc = self.data.get("project_doc") - asset_doc = self.data.get("asset_doc") + project_entity = self.data.get("project_entity") + folder_entity = self.data.get("folder_entity") + task_entity = self.data.get("task_entity") anatomy = self.data.get("anatomy") - if project_doc and asset_doc: + if project_entity and folder_entity and task_entity: self.log.debug("Started filtering of custom template paths.") template_path = get_custom_workfile_template( - project_doc, - asset_doc, - task_name, + project_entity, + folder_entity, + task_entity, host_name, anatomy, project_settings @@ -81,7 +82,7 @@ class CopyTemplateWorkfile(PreLaunchHook): )) template_path = get_custom_workfile_template_by_string_context( project_name, - asset_name, + folder_path, task_name, host_name, anatomy, diff --git a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py index 72c6bf2f68..8cbdaa338e 100644 --- a/client/ayon_core/hooks/pre_create_extra_workdir_folders.py +++ b/client/ayon_core/hooks/pre_create_extra_workdir_folders.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.pipeline.workfile import create_workdir_extra_folders diff --git a/client/ayon_core/hooks/pre_global_host_data.py b/client/ayon_core/hooks/pre_global_host_data.py index de6d4acc8b..e93b512742 100644 --- a/client/ayon_core/hooks/pre_global_host_data.py +++ b/client/ayon_core/hooks/pre_global_host_data.py @@ -1,6 +1,7 @@ -from ayon_core.client import get_project, get_asset_by_name -from ayon_core.lib.applications 
import ( - PreLaunchHook, +from ayon_api import get_project, get_folder_by_path, get_task_by_name + +from ayon_applications import PreLaunchHook +from ayon_applications.utils import ( EnvironmentPrepData, prepare_app_environments, prepare_context_environments @@ -16,7 +17,7 @@ class GlobalHostDataHook(PreLaunchHook): """Prepare global objects to `data` that will be used for sure.""" self.prepare_global_data() - if not self.data.get("asset_doc"): + if not self.data.get("folder_entity"): return app = self.launch_context.application @@ -27,8 +28,9 @@ class GlobalHostDataHook(PreLaunchHook): "app": app, - "project_doc": self.data["project_doc"], - "asset_doc": self.data["asset_doc"], + "project_entity": self.data["project_entity"], + "folder_entity": self.data["folder_entity"], + "task_entity": self.data["task_entity"], "anatomy": self.data["anatomy"], @@ -59,19 +61,37 @@ class GlobalHostDataHook(PreLaunchHook): return self.log.debug("Project name is set to \"{}\"".format(project_name)) + + # Project Entity + project_entity = get_project(project_name) + self.data["project_entity"] = project_entity + # Anatomy - self.data["anatomy"] = Anatomy(project_name) + self.data["anatomy"] = Anatomy( + project_name, project_entity=project_entity + ) - # Project document - project_doc = get_project(project_name) - self.data["project_doc"] = project_doc - - asset_name = self.data.get("folder_path") - if not asset_name: + folder_path = self.data.get("folder_path") + if not folder_path: self.log.warning( - "Asset name was not set. Skipping asset document query." + "Folder path is not set. Skipping folder query." ) return - asset_doc = get_asset_by_name(project_name, asset_name) - self.data["asset_doc"] = asset_doc + folder_entity = get_folder_by_path(project_name, folder_path) + self.data["folder_entity"] = folder_entity + + task_name = self.data.get("task_name") + if not task_name: + self.log.warning( + "Task name is not set. Skipping task query." 
+ ) + return + + if not folder_entity: + return + + task_entity = get_task_by_name( + project_name, folder_entity["id"], task_name + ) + self.data["task_entity"] = task_entity \ No newline at end of file diff --git a/client/ayon_core/hooks/pre_mac_launch.py b/client/ayon_core/hooks/pre_mac_launch.py index 34680155f1..b234a20310 100644 --- a/client/ayon_core/hooks/pre_mac_launch.py +++ b/client/ayon_core/hooks/pre_mac_launch.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchWithTerminal(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_new_console_apps.py b/client/ayon_core/hooks/pre_new_console_apps.py index c81b924573..9777d37900 100644 --- a/client/ayon_core/hooks/pre_new_console_apps.py +++ b/client/ayon_core/hooks/pre_new_console_apps.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class LaunchNewConsoleApps(PreLaunchHook): diff --git a/client/ayon_core/hooks/pre_non_python_host_launch.py b/client/ayon_core/hooks/pre_non_python_host_launch.py deleted file mode 100644 index fed4c99447..0000000000 --- a/client/ayon_core/hooks/pre_non_python_host_launch.py +++ /dev/null @@ -1,58 +0,0 @@ -import os - -from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import ( - get_non_python_host_kwargs, - PreLaunchHook, - LaunchTypes, -) - -from ayon_core import AYON_CORE_ROOT - - -class NonPythonHostHook(PreLaunchHook): - """Launch arguments preparation. - - Non python host implementation do not launch host directly but use - python script which launch the host. For these cases it is necessary to - prepend python (or ayon) executable and script path before application's. - """ - app_groups = {"harmony", "photoshop", "aftereffects"} - - order = 20 - launch_types = {LaunchTypes.local} - - def execute(self): - # Pop executable - executable_path = self.launch_context.launch_args.pop(0) - - # Pop rest of launch arguments - There should not be other arguments! 
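Looking back at GlobalHostDataHook above: its entity queries reduce to two ayon_api calls. A sketch with hypothetical context values (the hook reads them from self.data):

    import ayon_api

    project_name = "demo_project"
    folder_path = "/shots/sh010"
    task_name = "compositing"

    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
    task_entity = None
    if folder_entity:
        task_entity = ayon_api.get_task_by_name(
            project_name, folder_entity["id"], task_name
        )
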
- remainders = [] - while self.launch_context.launch_args: - remainders.append(self.launch_context.launch_args.pop(0)) - - script_path = os.path.join( - AYON_CORE_ROOT, - "scripts", - "non_python_host_launch.py" - ) - - new_launch_args = get_ayon_launcher_args( - "run", script_path, executable_path - ) - # Add workfile path if exists - workfile_path = self.data["last_workfile_path"] - if ( - self.data.get("start_last_workfile") - and workfile_path - and os.path.exists(workfile_path)): - new_launch_args.append(workfile_path) - - # Append as whole list as these areguments should not be separated - self.launch_context.launch_args.append(new_launch_args) - - if remainders: - self.launch_context.launch_args.extend(remainders) - - self.launch_context.kwargs = \ - get_non_python_host_kwargs(self.launch_context.kwargs) diff --git a/client/ayon_core/hooks/pre_ocio_hook.py b/client/ayon_core/hooks/pre_ocio_hook.py index 08d9563975..0817afec71 100644 --- a/client/ayon_core/hooks/pre_ocio_hook.py +++ b/client/ayon_core/hooks/pre_ocio_hook.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook from ayon_core.pipeline.colorspace import get_imageio_config from ayon_core.pipeline.template_data import get_template_data_with_names @@ -28,7 +28,7 @@ class OCIOEnvHook(PreLaunchHook): template_data = get_template_data_with_names( project_name=self.data["project_name"], - asset_name=self.data["folder_path"], + folder_path=self.data["folder_path"], task_name=self.data["task_name"], host_name=self.host_name, settings=self.data["project_settings"] diff --git a/client/ayon_core/host/dirmap.py b/client/ayon_core/host/dirmap.py index effafb6261..2e24877d28 100644 --- a/client/ayon_core/host/dirmap.py +++ b/client/ayon_core/host/dirmap.py @@ -36,23 +36,23 @@ class HostDirmap(object): host_name, project_name, project_settings=None, - sync_module=None + sitesync_addon=None ): self.host_name = host_name self.project_name = project_name self._project_settings = project_settings - self._sync_module = sync_module + self._sitesync_addon = sitesync_addon # to limit reinit of Modules - self._sync_module_discovered = sync_module is not None + self._sitesync_addon_discovered = sitesync_addon is not None self._log = None @property - def sync_module(self): - if not self._sync_module_discovered: - self._sync_module_discovered = True + def sitesync_addon(self): + if not self._sitesync_addon_discovered: + self._sitesync_addon_discovered = True manager = AddonsManager() - self._sync_module = manager.get("sync_server") - return self._sync_module + self._sitesync_addon = manager.get("sitesync") + return self._sitesync_addon @property def project_settings(self): @@ -158,25 +158,25 @@ class HostDirmap(object): """ project_name = self.project_name - sync_module = self.sync_module + sitesync_addon = self.sitesync_addon mapping = {} if ( - sync_module is None - or not sync_module.enabled - or project_name not in sync_module.get_enabled_projects() + sitesync_addon is None + or not sitesync_addon.enabled + or project_name not in sitesync_addon.get_enabled_projects() ): return mapping - active_site = sync_module.get_local_normalized_site( - sync_module.get_active_site(project_name)) - remote_site = sync_module.get_local_normalized_site( - sync_module.get_remote_site(project_name)) + active_site = sitesync_addon.get_local_normalized_site( + sitesync_addon.get_active_site(project_name)) + remote_site = sitesync_addon.get_local_normalized_site( + 
sitesync_addon.get_remote_site(project_name)) self.log.debug( "active {} - remote {}".format(active_site, remote_site) ) if active_site == "local" and active_site != remote_site: - sync_settings = sync_module.get_sync_project_setting( + sync_settings = sitesync_addon.get_sync_project_setting( project_name, exclude_locals=False, cached=False) @@ -194,7 +194,7 @@ class HostDirmap(object): self.log.debug("remote overrides {}".format(remote_overrides)) current_platform = platform.system().lower() - remote_provider = sync_module.get_provider_for_site( + remote_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) # dirmap has sense only with regular disk provider, in the workfile diff --git a/client/ayon_core/host/host.py b/client/ayon_core/host/host.py index f79c22824b..081aafdbe3 100644 --- a/client/ayon_core/host/host.py +++ b/client/ayon_core/host/host.py @@ -18,7 +18,7 @@ class HostBase(object): Compared to 'avalon' concept: What was before considered as functions in host implementation folder. The host implementation should primarily care about adding ability of creation - (mark subsets to be published) and optionally about referencing published + (mark products to be published) and optionally about referencing published representations as containers. Host may need extend some functionality like working with workfiles @@ -108,7 +108,7 @@ class HostBase(object): return os.environ.get("AYON_PROJECT_NAME") - def get_current_asset_name(self): + def get_current_folder_path(self): """ Returns: Union[str, None]: Current asset name. @@ -139,7 +139,7 @@ class HostBase(object): return { "project_name": self.get_current_project_name(), - "folder_path": self.get_current_asset_name(), + "folder_path": self.get_current_folder_path(), "task_name": self.get_current_task_name() } @@ -161,13 +161,13 @@ class HostBase(object): # Use current context to fill the context title current_context = self.get_current_context() project_name = current_context["project_name"] - asset_name = current_context["folder_path"] + folder_path = current_context["folder_path"] task_name = current_context["task_name"] items = [] if project_name: items.append(project_name) - if asset_name: - items.append(asset_name.lstrip("/")) + if folder_path: + items.append(folder_path.lstrip("/")) if task_name: items.append(task_name) if items: diff --git a/client/ayon_core/hosts/aftereffects/__init__.py b/client/ayon_core/hosts/aftereffects/__init__.py index ae750d05b6..02ab287629 100644 --- a/client/ayon_core/hosts/aftereffects/__init__.py +++ b/client/ayon_core/hosts/aftereffects/__init__.py @@ -1,6 +1,12 @@ -from .addon import AfterEffectsAddon +from .addon import ( + AFTEREFFECTS_ADDON_ROOT, + AfterEffectsAddon, + get_launch_script_path, +) __all__ = ( + "AFTEREFFECTS_ADDON_ROOT", "AfterEffectsAddon", + "get_launch_script_path", ) diff --git a/client/ayon_core/hosts/aftereffects/addon.py b/client/ayon_core/hosts/aftereffects/addon.py index 46d0818247..fc54043c1d 100644 --- a/client/ayon_core/hosts/aftereffects/addon.py +++ b/client/ayon_core/hosts/aftereffects/addon.py @@ -1,5 +1,9 @@ +import os + from ayon_core.addon import AYONAddon, IHostAddon +AFTEREFFECTS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) + class AfterEffectsAddon(AYONAddon, IHostAddon): name = "aftereffects" @@ -17,3 +21,16 @@ class AfterEffectsAddon(AYONAddon, IHostAddon): def get_workfile_extensions(self): return [".aep"] + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + 
os.path.join(AFTEREFFECTS_ADDON_ROOT, "hooks") + ] + + +def get_launch_script_path(): + return os.path.join( + AFTEREFFECTS_ADDON_ROOT, "api", "launch_script.py" + ) diff --git a/client/ayon_core/hosts/aftereffects/api/__init__.py b/client/ayon_core/hosts/aftereffects/api/__init__.py index 28062cc35d..b1d83c5ad9 100644 --- a/client/ayon_core/hosts/aftereffects/api/__init__.py +++ b/client/ayon_core/hosts/aftereffects/api/__init__.py @@ -17,7 +17,7 @@ from .pipeline import ( from .lib import ( maintained_selection, get_extension_manifest_path, - get_asset_settings, + get_folder_settings, set_settings ) @@ -31,13 +31,14 @@ __all__ = [ "get_stub", # pipeline + "AfterEffectsHost", "ls", "containerise", # lib "maintained_selection", "get_extension_manifest_path", - "get_asset_settings", + "get_folder_settings", "set_settings", # plugin diff --git a/client/ayon_core/hosts/aftereffects/api/launch_logic.py b/client/ayon_core/hosts/aftereffects/api/launch_logic.py index 0d1a6cf585..5a23f2cb35 100644 --- a/client/ayon_core/hosts/aftereffects/api/launch_logic.py +++ b/client/ayon_core/hosts/aftereffects/api/launch_logic.py @@ -7,7 +7,6 @@ import asyncio import functools import traceback - from wsrpc_aiohttp import ( WebSocketRoute, WebSocketAsync @@ -286,20 +285,21 @@ class AfterEffectsRoute(WebSocketRoute): # This method calls function on the client side # client functions - async def set_context(self, project, asset, task): + async def set_context(self, project, folder, task): """ - Sets 'project' and 'asset' to envs, eg. setting context + Sets 'project', 'folder' and 'task' to envs, eg. setting context Args: project (str) - asset (str) + folder (str) + task (str) """ log.info("Setting context change") - log.info("project {} asset {} ".format(project, asset)) + log.info("project {} folder {} ".format(project, folder)) if project: os.environ["AYON_PROJECT_NAME"] = project - if asset: - os.environ["AYON_FOLDER_PATH"] = asset + if folder: + os.environ["AYON_FOLDER_PATH"] = folder if task: os.environ["AYON_TASK_NAME"] = task diff --git a/client/ayon_core/scripts/non_python_host_launch.py b/client/ayon_core/hosts/aftereffects/api/launch_script.py similarity index 77% rename from client/ayon_core/scripts/non_python_host_launch.py rename to client/ayon_core/hosts/aftereffects/api/launch_script.py index 4c18fd0ccc..87926c022b 100644 --- a/client/ayon_core/scripts/non_python_host_launch.py +++ b/client/ayon_core/hosts/aftereffects/api/launch_script.py @@ -1,4 +1,4 @@ -"""Script wraps launch mechanism of non python host implementations. +"""Script wraps launch mechanism of AfterEffects implementations. Arguments passed to the script are passed to launch function in host implementation. In all cases requires host app executable and may contain @@ -8,6 +8,8 @@ workfile or others. 
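The context switch in AfterEffectsRoute.set_context above amounts to setting the environment variables that AYON tools read back. With hypothetical values:

    import os

    # AYON resolves the current context from these variables.
    os.environ["AYON_PROJECT_NAME"] = "demo_project"
    os.environ["AYON_FOLDER_PATH"] = "/shots/sh010"
    os.environ["AYON_TASK_NAME"] = "compositing"
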
import os import sys +from ayon_core.hosts.aftereffects.api.launch_logic import main as host_main + # Get current file to locate start point of sys.argv CURRENT_FILE = os.path.abspath(__file__) @@ -79,26 +81,9 @@ def main(argv): if after_script_idx is not None: launch_args = sys_args[after_script_idx:] - host_name = os.environ["AYON_HOST_NAME"].lower() - if host_name == "photoshop": - # TODO refactor launch logic according to AE - from ayon_core.hosts.photoshop.api.lib import main - elif host_name == "aftereffects": - from ayon_core.hosts.aftereffects.api.launch_logic import main - elif host_name == "harmony": - from ayon_core.hosts.harmony.api.lib import main - else: - title = "Unknown host name" - message = ( - "BUG: Environment variable AYON_HOST_NAME contains unknown" - " host name \"{}\"" - ).format(host_name) - show_error_messagebox(title, message) - return - if launch_args: # Launch host implementation - main(*launch_args) + host_main(*launch_args) else: # Show message box on_invalid_args(after_script_idx is None) diff --git a/client/ayon_core/hosts/aftereffects/api/lib.py b/client/ayon_core/hosts/aftereffects/api/lib.py index 0a2ee7b7ac..d476378dcd 100644 --- a/client/ayon_core/hosts/aftereffects/api/lib.py +++ b/client/ayon_core/hosts/aftereffects/api/lib.py @@ -4,8 +4,10 @@ import json import contextlib import logging +import ayon_api + from ayon_core.pipeline.context_tools import get_current_context -from ayon_core.client import get_asset_by_name + from .ws_stub import get_stub log = logging.getLogger(__name__) @@ -85,21 +87,21 @@ def get_background_layers(file_url): return layers -def get_asset_settings(asset_doc): - """Get settings on current asset from database. +def get_folder_settings(folder_entity): + """Get settings of current folder. Returns: dict: Scene data. 
""" - asset_data = asset_doc["data"] - fps = asset_data.get("fps", 0) - frame_start = asset_data.get("frameStart", 0) - frame_end = asset_data.get("frameEnd", 0) - handle_start = asset_data.get("handleStart", 0) - handle_end = asset_data.get("handleEnd", 0) - resolution_width = asset_data.get("resolutionWidth", 0) - resolution_height = asset_data.get("resolutionHeight", 0) + folder_attributes = folder_entity["attrib"] + fps = folder_attributes.get("fps", 0) + frame_start = folder_attributes.get("frameStart", 0) + frame_end = folder_attributes.get("frameEnd", 0) + handle_start = folder_attributes.get("handleStart", 0) + handle_end = folder_attributes.get("handleEnd", 0) + resolution_width = folder_attributes.get("resolutionWidth", 0) + resolution_height = folder_attributes.get("resolutionHeight", 0) duration = (frame_end - frame_start + 1) + handle_start + handle_end return { @@ -127,9 +129,11 @@ def set_settings(frames, resolution, comp_ids=None, print_msg=True): frame_start = frames_duration = fps = width = height = None current_context = get_current_context() - asset_doc = get_asset_by_name(current_context["project_name"], - current_context["folder_path"]) - settings = get_asset_settings(asset_doc) + folder_entity = ayon_api.get_folder_by_path( + current_context["project_name"], + current_context["folder_path"] + ) + settings = get_folder_settings(folder_entity) msg = '' if frames: diff --git a/client/ayon_core/hosts/aftereffects/api/pipeline.py b/client/ayon_core/hosts/aftereffects/api/pipeline.py index 7ed244fd1d..105fee64b9 100644 --- a/client/ayon_core/hosts/aftereffects/api/pipeline.py +++ b/client/ayon_core/hosts/aftereffects/api/pipeline.py @@ -271,7 +271,7 @@ def containerise(name, "name": name, "namespace": namespace, "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "members": comp.members or [comp.id] } diff --git a/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py new file mode 100644 index 0000000000..a37481566e --- /dev/null +++ b/client/ayon_core/hosts/aftereffects/hooks/pre_launch_args.py @@ -0,0 +1,88 @@ +import os +import platform +import subprocess + +from ayon_core.lib import ( + get_ayon_launcher_args, + is_using_ayon_console, +) +from ayon_applications import PreLaunchHook, LaunchTypes +from ayon_core.hosts.aftereffects import get_launch_script_path + + +def get_launch_kwargs(kwargs): + """Explicit setting of kwargs for Popen for AfterEffects. + + Expected behavior + - ayon_console opens window with logs + - ayon has stdout/stderr available for capturing + + Args: + kwargs (Union[dict, None]): Current kwargs or None. + + """ + if kwargs is None: + kwargs = {} + + if platform.system().lower() != "windows": + return kwargs + + if is_using_ayon_console(): + kwargs.update({ + "creationflags": subprocess.CREATE_NEW_CONSOLE + }) + else: + kwargs.update({ + "creationflags": subprocess.CREATE_NO_WINDOW, + "stdout": subprocess.DEVNULL, + "stderr": subprocess.DEVNULL + }) + return kwargs + + +class AEPrelaunchHook(PreLaunchHook): + """Launch arguments preparation. + + Hook add python executable and script path to AE implementation before + AE executable and add last workfile path to launch arguments. + + Existence of last workfile is checked. If workfile does not exists tries + to copy templated workfile from predefined path. 
+ """ + app_groups = {"aftereffects"} + + order = 20 + launch_types = {LaunchTypes.local} + + def execute(self): + # Pop executable + executable_path = self.launch_context.launch_args.pop(0) + + # Pop rest of launch arguments - There should not be other arguments! + remainders = [] + while self.launch_context.launch_args: + remainders.append(self.launch_context.launch_args.pop(0)) + + script_path = get_launch_script_path() + + new_launch_args = get_ayon_launcher_args( + "run", script_path, executable_path + ) + # Add workfile path if exists + workfile_path = self.data["last_workfile_path"] + if ( + self.data.get("start_last_workfile") + and workfile_path + and os.path.exists(workfile_path) + ): + new_launch_args.append(workfile_path) + + # Append as whole list as these arguments should not be separated + self.launch_context.launch_args.append(new_launch_args) + + if remainders: + self.launch_context.launch_args.extend(remainders) + + self.launch_context.kwargs = get_launch_kwargs( + self.launch_context.kwargs + ) diff --git a/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py b/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py index 93aec33222..29df34876a 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/create/create_render.py @@ -218,7 +218,13 @@ class RenderCreator(Creator): """ def get_dynamic_data( - self, project_name, asset_doc, task_name, variant, host_name, instance + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ): dynamic_data = {} if instance is not None: diff --git a/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py b/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py index 282e06d0bf..b46e82bf1a 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py +++ b/client/ayon_core/hosts/aftereffects/plugins/create/workfile_creator.py @@ -1,5 +1,6 @@ +import ayon_api + import ayon_core.hosts.aftereffects.api as api -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( AutoCreator, CreatedInstance @@ -39,32 +40,37 @@ class AEWorkfileCreator(AutoCreator): context = self.create_context project_name = context.get_current_project_name() - asset_name = context.get_current_asset_name() + folder_path = context.get_current_folder_path() task_name = context.get_current_task_name() host_name = context.host_name - existing_asset_name = None + existing_folder_path = None if existing_instance is not None: - existing_asset_name = existing_instance.get("folderPath") + existing_folder_path = existing_instance.get("folderPath") if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant, } data.update(self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, None, @@ -79,17 +85,22 @@ class AEWorkfileCreator(AutoCreator): new_instance.data_to_store()) elif ( - existing_asset_name != asset_name + existing_folder_path != folder_path 
or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py index c7f743fce4..5685011d5f 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_background.py @@ -20,8 +20,8 @@ class BackgroundLoader(api.AfterEffectsLoader): metadata """ label = "Load JSON Background" - families = ["background"] - representations = ["json"] + product_types = {"background"} + representations = {"json"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() @@ -31,7 +31,7 @@ class BackgroundLoader(api.AfterEffectsLoader): comp_name = get_unique_layer_name( existing_items, - "{}_{}".format(context["asset"]["name"], name)) + "{}_{}".format(context["folder"]["name"], name)) path = self.filepath_from_context(context) layers = get_background_layers(path) @@ -56,16 +56,19 @@ class BackgroundLoader(api.AfterEffectsLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() - context = representation.get("context", {}) + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + repre_entity = context["representation"] + _ = container.pop("layer") # without iterator number (_001, 002...) 
namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - comp_name = "{}_{}".format(context["asset"], context["subset"]) + comp_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != comp_name: @@ -73,11 +76,11 @@ class BackgroundLoader(api.AfterEffectsLoader): existing_items = [layer.name for layer in items] comp_name = get_unique_layer_name( existing_items, - "{}_{}".format(context["asset"], context["subset"])) + "{}_{}".format(folder_name, product_name)) else: # switching version - keep same name comp_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) layers = get_background_layers(path) comp = stub.reload_background(container["members"][1], @@ -85,8 +88,8 @@ class BackgroundLoader(api.AfterEffectsLoader): layers) # update container - container["representation"] = str(representation["_id"]) - container["name"] = context["subset"] + container["representation"] = repre_entity["id"] + container["name"] = product_name container["namespace"] = comp_name container["members"] = comp.members @@ -104,5 +107,5 @@ class BackgroundLoader(api.AfterEffectsLoader): stub.imprint(layer.id, {}) stub.delete_item(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py index a8e67e9f88..4b81201722 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py +++ b/client/ayon_core/hosts/aftereffects/plugins/load/load_file.py @@ -12,20 +12,25 @@ class FileLoader(api.AfterEffectsLoader): """ label = "Load file" - families = ["image", - "plate", - "render", - "prerender", - "review", - "audio"] - representations = ["*"] + product_types = { + "image", + "plate", + "render", + "prerender", + "review", + "audio", + } + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() layers = stub.get_items(comps=True, folders=True, footages=True) existing_layers = [layer.name for layer in layers] comp_name = get_unique_layer_name( - existing_layers, "{}_{}".format(context["asset"]["name"], name)) + existing_layers, "{}_{}".format( + context["folder"]["name"], name + ) + ) import_options = {} @@ -35,7 +40,7 @@ class FileLoader(api.AfterEffectsLoader): import_options['sequence'] = True if not path: - repr_id = context["representation"]["_id"] + repr_id = context["representation"]["id"] self.log.warning( "Representation id `{}` is failing to load".format(repr_id)) return @@ -64,31 +69,33 @@ class FileLoader(api.AfterEffectsLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + repre_entity = context["representation"] namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - layer_name = "{}_{}".format(context["asset"], context["subset"]) + layer_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != layer_name: layers = stub.get_items(comps=True) existing_layers = [layer.name for layer in layers] layer_name = 
get_unique_layer_name( existing_layers, - "{}_{}".format(context["asset"], context["subset"])) + "{}_{}".format(folder_name, product_name)) else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) # with aftereffects.maintained_selection(): # TODO stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name) stub.imprint( - layer.id, {"representation": str(representation["_id"]), - "name": context["subset"], + layer.id, {"representation": repre_entity["id"], + "name": product_name, "namespace": layer_name} ) @@ -103,5 +110,5 @@ class FileLoader(api.AfterEffectsLoader): stub.imprint(layer.id, {}) stub.delete_item(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py index afd58ca758..c28042b6ae 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/collect_render.py @@ -1,14 +1,11 @@ import os -import re import tempfile -import attr +import attr import pyblish.api -from ayon_core.settings import get_project_settings from ayon_core.pipeline import publish from ayon_core.pipeline.publish import RenderInstance - from ayon_core.hosts.aftereffects.api import get_stub @@ -44,7 +41,6 @@ class CollectAERender(publish.AbstractCollectRender): def get_instances(self, context): instances = [] - instances_to_remove = [] app_version = CollectAERender.get_stub().get_app_version() app_version = app_version[0:4] @@ -120,7 +116,10 @@ class CollectAERender(publish.AbstractCollectRender): fps=fps, app_version=app_version, publish_attributes=inst.data.get("publish_attributes", {}), - file_names=[item.file_name for item in render_q] + file_names=[item.file_name for item in render_q], + + # The source instance this render instance replaces + source_instance=inst ) comp = compositions_by_id.get(comp_id) @@ -148,10 +147,7 @@ class CollectAERender(publish.AbstractCollectRender): instance.families.remove("review") instances.append(instance) - instances_to_remove.append(inst) - for instance in instances_to_remove: - context.remove(instance) return instances def get_expected_files(self, render_instance): diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml index d89a851c64..23e1b50551 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_instance_asset.xml @@ -1,7 +1,7 @@ -Subset context +Product context ## Invalid product context @@ -15,7 +15,7 @@ You can fix this with "repair" button on the right and refresh Publish at the bo ### __Detailed Info__ (optional) This might happen if you are reuse old workfile and open it in different context. -(Eg. you created product name "renderCompositingDefault" from folder "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing product for "Robot" asset stayed in the workfile.) +(Eg. 
you created product name "renderCompositingDefault" from folder "Robot" in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but the existing product for the "Robot" folder stayed in the workfile.)
\ No newline at end of file
diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
index 0591020ed3..b2da7af114 100644
--- a/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
+++ b/client/ayon_core/hosts/aftereffects/plugins/publish/help/validate_scene_settings.xml
@@ -5,20 +5,20 @@

 ## Invalid scene setting found

-One of the settings in a scene doesn't match to asset settings in database.
+One of the settings in the scene doesn't match the folder settings in the database.

 {invalid_setting_str}

 ### How to repair?

-Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
+Change values for {invalid_keys_str} in the scene OR change them on the folder in the database if they are wrong there.

 In the scene it is right mouse click on published composition > `Composition Settings`.

 ### __Detailed Info__ (optional)

-This error is shown when for example resolution in the scene doesn't match to resolution set on the asset in the database.
+This error is shown when, for example, the resolution in the scene doesn't match the resolution set on the folder in the database.

 Either value in the database or in the scene is wrong.

diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py
index e8f2e29a2f..c4411bd4c2 100644
--- a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py
+++ b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_instance_asset.py
@@ -1,6 +1,6 @@
 import pyblish.api

-from ayon_core.pipeline import get_current_asset_name
+from ayon_core.pipeline import get_current_folder_path
 from ayon_core.pipeline.publish import (
     ValidateContentsOrder,
     PublishXmlValidationError,
@@ -8,8 +8,8 @@ from ayon_core.pipeline.publish import (
 from ayon_core.hosts.aftereffects.api import get_stub


-class ValidateInstanceAssetRepair(pyblish.api.Action):
-    """Repair the instance asset with value from Context."""
+class ValidateInstanceFolderRepair(pyblish.api.Action):
+    """Repair the instance folder with the value from Context."""

     label = "Repair"
     icon = "wrench"
@@ -30,35 +30,35 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):

         for instance in instances:
             data = stub.read(instance[0])

-            data["folderPath"] = get_current_asset_name()
+            data["folderPath"] = get_current_folder_path()
             stub.imprint(instance[0].instance_id, data)


-class ValidateInstanceAsset(pyblish.api.InstancePlugin):
-    """Validate the instance asset is the current selected context asset.
+class ValidateInstanceFolder(pyblish.api.InstancePlugin):
+    """Validate the instance folder is the currently selected context folder.

     As it might happen that multiple workfiles are opened at the same time,
     switching between them would mess with the selected context.
     (From Launcher or Ftrack).

-    In that case outputs might be output under wrong asset!
+    In that case outputs might end up under the wrong folder!
- Repair action will use Context asset value (from Workfiles or Launcher) + Repair action will use Context folder value (from Workfiles or Launcher) Closing and reopening with Workfiles will refresh Context value. """ - label = "Validate Instance Asset" + label = "Validate Instance Folder" hosts = ["aftereffects"] - actions = [ValidateInstanceAssetRepair] + actions = [ValidateInstanceFolderRepair] order = ValidateContentsOrder def process(self, instance): - instance_asset = instance.data["folderPath"] - current_asset = get_current_asset_name() + instance_folder = instance.data["folderPath"] + current_folder = get_current_folder_path() msg = ( - f"Instance asset {instance_asset} is not the same " - f"as current context {current_asset}." + f"Instance folder {instance_folder} is not the same " + f"as current context {current_folder}." ) - if instance_asset != current_asset: + if instance_folder != current_folder: raise PublishXmlValidationError(self, msg) diff --git a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_scene_settings.py index 0a90ae2a5a..6375f5cc61 100644 --- a/client/ayon_core/hosts/aftereffects/plugins/publish/validate_scene_settings.py +++ b/client/ayon_core/hosts/aftereffects/plugins/publish/validate_scene_settings.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Validate scene settings. Requires: - instance -> assetEntity + instance -> folderEntity instance -> anatomyData """ import os @@ -13,7 +13,7 @@ from ayon_core.pipeline import ( PublishXmlValidationError, OptionalPyblishPluginMixin ) -from ayon_core.hosts.aftereffects.api import get_asset_settings +from ayon_core.hosts.aftereffects.api import get_folder_settings class ValidateSceneSettings(OptionalPyblishPluginMixin, @@ -48,7 +48,7 @@ class ValidateSceneSettings(OptionalPyblishPluginMixin, fps handleStart handleEnd - skip_resolution_check - fill entity type ('asset') to skip validation + skip_resolution_check - fill entity type ('folder') to skip validation resolutionWidth resolutionHeight TODO support in extension is missing for now @@ -71,11 +71,11 @@ class ValidateSceneSettings(OptionalPyblishPluginMixin, if not self.is_active(instance.data): return - asset_doc = instance.data["assetEntity"] - expected_settings = get_asset_settings(asset_doc) + folder_entity = instance.data["folderEntity"] + expected_settings = get_folder_settings(folder_entity) self.log.info("config from DB::{}".format(expected_settings)) - task_name = instance.data["anatomyData"]["task"]["name"] + task_name = instance.data["task"] if any(re.search(pattern, task_name) for pattern in self.skip_resolution_check): expected_settings.pop("resolutionWidth") diff --git a/client/ayon_core/hosts/blender/addon.py b/client/ayon_core/hosts/blender/addon.py index b7484de243..6a4b325365 100644 --- a/client/ayon_core/hosts/blender/addon.py +++ b/client/ayon_core/hosts/blender/addon.py @@ -55,8 +55,7 @@ class BlenderAddon(AYONAddon, IHostAddon): ) # Define Qt binding if not defined - if not env.get("QT_PREFERRED_BINDING"): - env["QT_PREFERRED_BINDING"] = "PySide2" + env.pop("QT_PREFERRED_BINDING", None) def get_launch_hook_paths(self, app): if app.host_name != self.host_name: diff --git a/client/ayon_core/hosts/blender/api/ops.py b/client/ayon_core/hosts/blender/api/ops.py index dcbc44bcad..c03ec98d0c 100644 --- a/client/ayon_core/hosts/blender/api/ops.py +++ b/client/ayon_core/hosts/blender/api/ops.py @@ -16,7 +16,7 @@ import bpy import bpy.utils.previews from ayon_core 
import style -from ayon_core.pipeline import get_current_asset_name, get_current_task_name +from ayon_core.pipeline import get_current_folder_path, get_current_task_name from ayon_core.tools.utils import host_tools from .workio import OpenFileCacher @@ -191,7 +191,7 @@ def _process_app_events() -> Optional[float]: class LaunchQtApp(bpy.types.Operator): - """A Base class for opertors to launch a Qt app.""" + """A Base class for operators to launch a Qt app.""" _app: QtWidgets.QApplication _window = Union[QtWidgets.QDialog, ModuleType] @@ -355,7 +355,7 @@ class SetFrameRange(bpy.types.Operator): bl_label = "Set Frame Range" def execute(self, context): - data = pipeline.get_asset_data() + data = pipeline.get_folder_attributes() pipeline.set_frame_range(data) return {"FINISHED"} @@ -365,7 +365,7 @@ class SetResolution(bpy.types.Operator): bl_label = "Set Resolution" def execute(self, context): - data = pipeline.get_asset_data() + data = pipeline.get_folder_attributes() pipeline.set_resolution(data) return {"FINISHED"} @@ -388,9 +388,9 @@ class TOPBAR_MT_avalon(bpy.types.Menu): else: pyblish_menu_icon_id = 0 - asset = get_current_asset_name() - task = get_current_task_name() - context_label = f"{asset}, {task}" + folder_path = get_current_folder_path() + task_name = get_current_task_name() + context_label = f"{folder_path}, {task_name}" context_label_item = layout.row() context_label_item.operator( LaunchWorkFiles.bl_idname, text=context_label diff --git a/client/ayon_core/hosts/blender/api/pipeline.py b/client/ayon_core/hosts/blender/api/pipeline.py index fcac285f74..84e78d0883 100644 --- a/client/ayon_core/hosts/blender/api/pipeline.py +++ b/client/ayon_core/hosts/blender/api/pipeline.py @@ -9,6 +9,7 @@ from . import lib from . import ops import pyblish.api +import ayon_api from ayon_core.host import ( HostBase, @@ -16,11 +17,10 @@ from ayon_core.host import ( IPublishHost, ILoadHost ) -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( schema, get_current_project_name, - get_current_asset_name, + get_current_folder_path, register_loader_plugin_path, register_creator_plugin_path, deregister_loader_plugin_path, @@ -221,12 +221,12 @@ def message_window(title, message): _process_app_events() -def get_asset_data(): +def get_folder_attributes(): project_name = get_current_project_name() - asset_name = get_current_asset_name() - asset_doc = get_asset_by_name(project_name, asset_name) + folder_path = get_current_folder_path() + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - return asset_doc.get("data") + return folder_entity["attrib"] def set_frame_range(data): @@ -279,7 +279,7 @@ def on_new(): set_resolution_startup = settings.get("set_resolution_startup") set_frames_startup = settings.get("set_frames_startup") - data = get_asset_data() + data = get_folder_attributes() if set_resolution_startup: set_resolution(data) @@ -300,7 +300,7 @@ def on_open(): set_resolution_startup = settings.get("set_resolution_startup") set_frames_startup = settings.get("set_frames_startup") - data = get_asset_data() + data = get_folder_attributes() if set_resolution_startup: set_resolution(data) @@ -468,7 +468,7 @@ def containerise(name: str, """ - node_name = f"{context['asset']['name']}_{name}" + node_name = f"{context['folder']['name']}_{name}" if namespace: node_name = f"{namespace}:{node_name}" if suffix: @@ -484,7 +484,7 @@ def containerise(name: str, "name": name, "namespace": namespace or '', "loader": str(loader), - "representation": 
str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } metadata_update(container, data) @@ -523,7 +523,7 @@ def containerise_existing( "name": name, "namespace": namespace or '', "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } metadata_update(container, data) diff --git a/client/ayon_core/hosts/blender/api/plugin.py b/client/ayon_core/hosts/blender/api/plugin.py index 5f9cb4a830..6c9bfb6569 100644 --- a/client/ayon_core/hosts/blender/api/plugin.py +++ b/client/ayon_core/hosts/blender/api/plugin.py @@ -49,7 +49,7 @@ def prepare_scene_name( def get_unique_number( folder_name: str, product_name: str ) -> str: - """Return a unique number based on the asset name.""" + """Return a unique number based on the folder name.""" avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) if not avalon_container: return "01" @@ -220,9 +220,9 @@ class BaseCreator(Creator): Create new instance and store it. Args: - product_name(str): Subset name of created instance. - instance_data(dict): Instance base data. - pre_create_data(dict): Data based on pre creation attributes. + product_name (str): Product name of created instance. + instance_data (dict): Instance base data. + pre_create_data (dict): Data based on pre creation attributes. Those may affect how creator works. """ # Get Instance Container or create it if it does not exist @@ -232,9 +232,9 @@ class BaseCreator(Creator): bpy.context.scene.collection.children.link(instances) # Create asset group - asset_name = instance_data["folderPath"].split("/")[-1] + folder_name = instance_data["folderPath"].split("/")[-1] - name = prepare_scene_name(asset_name, product_name) + name = prepare_scene_name(folder_name, product_name) if self.create_as_asset_group: # Create instance as empty instance_node = bpy.data.objects.new(name=name, object_data=None) @@ -312,9 +312,9 @@ class BaseCreator(Creator): "productName" in changes.changed_keys or "folderPath" in changes.changed_keys ) and created_instance.product_type != "workfile": - asset_name = data["folderPath"].split("/")[-1] + folder_name = data["folderPath"].split("/")[-1] name = prepare_scene_name( - asset_name, data["productName"] + folder_name, data["productName"] ) node.name = name @@ -346,7 +346,7 @@ class BaseCreator(Creator): """Fill instance data with required items. Args: - product_name(str): Subset name of created instance. + product_name(str): Product name of created instance. instance_data(dict): Instance base data. instance_node(bpy.types.ID): Instance node in blender scene. """ @@ -465,8 +465,8 @@ class AssetLoader(LoaderPlugin): filepath = self.filepath_from_context(context) assert Path(filepath).exists(), f"{filepath} doesn't exist." 
- folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] unique_number = get_unique_number( folder_name, product_name ) @@ -498,21 +498,21 @@ class AssetLoader(LoaderPlugin): # loader=self.__class__.__name__, # ) - # folder_name = context["asset"]["name"] - # product_name = context["subset"]["name"] + # folder_name = context["folder"]["name"] + # product_name = context["product"]["name"] # instance_name = prepare_scene_name( # folder_name, product_name, unique_number # ) + '_CON' # return self._get_instance_collection(instance_name, nodes) - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Must be implemented by a sub-class""" raise NotImplementedError("Must be implemented by a sub-class") - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): """ Run the update on Blender main thread""" - mti = MainThreadItem(self.exec_update, container, representation) + mti = MainThreadItem(self.exec_update, container, context) execute_in_main_thread(mti) def exec_remove(self, container: Dict) -> bool: diff --git a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py index 00b297f998..9041ef7309 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py +++ b/client/ayon_core/hosts/blender/hooks/pre_add_run_python_script_arg.py @@ -1,6 +1,6 @@ from pathlib import Path -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class AddPythonScriptToLaunchArgs(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py index c80a1bd669..87a4f5cfad 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/blender/hooks/pre_pyside_install.py @@ -2,7 +2,7 @@ import os import re import subprocess from platform import system -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToBlender(PreLaunchHook): @@ -31,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook): def inner_execute(self): # Get blender's python directory - version_regex = re.compile(r"^[2-4]\.[0-9]+$") + version_regex = re.compile(r"^([2-4])\.[0-9]+$") platform = system().lower() executable = self.launch_context.executable.executable_path @@ -42,7 +42,8 @@ class InstallPySideToBlender(PreLaunchHook): if os.path.basename(executable).lower() != expected_executable: self.log.info(( f"Executable does not lead to {expected_executable} file." - "Can't determine blender's python to check/install PySide2." + "Can't determine blender's python to check/install" + " Qt binding." )) return @@ -73,6 +74,15 @@ class InstallPySideToBlender(PreLaunchHook): return version_subfolder = version_subfolders[0] + before_blender_4 = False + if int(version_regex.match(version_subfolder).group(1)) < 4: + before_blender_4 = True + # Blender 4 has Python 3.11 which does not support 'PySide2' + # QUESTION could we always install PySide6? 
+ qt_binding = "PySide2" if before_blender_4 else "PySide6" + # Use PySide6 6.6.3 because 6.7.0 had a bug + # - 'QTextEdit' can't be added to 'QBoxLayout' + qt_binding_version = None if before_blender_4 else "6.6.3" python_dir = os.path.join(versions_dir, version_subfolder, "python") python_lib = os.path.join(python_dir, "lib") @@ -116,22 +126,41 @@ class InstallPySideToBlender(PreLaunchHook): return # Check if PySide2 is installed and skip if yes - if self.is_pyside_installed(python_executable): + if self.is_pyside_installed(python_executable, qt_binding): self.log.debug("Blender has already installed PySide2.") return # Install PySide2 in blender's python if platform == "windows": - result = self.install_pyside_windows(python_executable) + result = self.install_pyside_windows( + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ) else: - result = self.install_pyside(python_executable) + result = self.install_pyside( + python_executable, + qt_binding, + qt_binding_version, + ) if result: - self.log.info("Successfully installed PySide2 module to blender.") + self.log.info( + f"Successfully installed {qt_binding} module to blender." + ) else: - self.log.warning("Failed to install PySide2 module to blender.") + self.log.warning( + f"Failed to install {qt_binding} module to blender." + ) - def install_pyside_windows(self, python_executable): + def install_pyside_windows( + self, + python_executable, + qt_binding, + qt_binding_version, + before_blender_4, + ): """Install PySide2 python module to blender's python. Installation requires administration rights that's why it is required @@ -139,7 +168,6 @@ class InstallPySideToBlender(PreLaunchHook): administration rights. """ try: - import win32api import win32con import win32process import win32event @@ -150,12 +178,37 @@ class InstallPySideToBlender(PreLaunchHook): self.log.warning("Couldn't import \"pywin32\" modules") return + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" + try: # Parameters # - use "-m pip" as module pip to install PySide2 and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible - parameters = "-m pip install --ignore-installed PySide2" + fake_exe = "fake.exe" + site_packages_prefix = os.path.dirname( + os.path.dirname(python_executable) + ) + args = [ + fake_exe, + "-m", + "pip", + "install", + "--ignore-installed", + qt_binding, + ] + if not before_blender_4: + # Define prefix for site package + # Python in blender 4.x is installing packages in AppData and + # not in blender's directory. 
+ args.extend(["--prefix", site_packages_prefix]) + + parameters = ( + subprocess.list2cmdline(args) + .lstrip(fake_exe) + .lstrip(" ") + ) # Execute command and ask for administrator's rights process_info = ShellExecuteEx( @@ -173,20 +226,29 @@ class InstallPySideToBlender(PreLaunchHook): except pywintypes.error: pass - def install_pyside(self, python_executable): - """Install PySide2 python module to blender's python.""" + def install_pyside( + self, + python_executable, + qt_binding, + qt_binding_version, + ): + """Install Qt binding python module to blender's python.""" + if qt_binding_version: + qt_binding = f"{qt_binding}=={qt_binding_version}" try: # Parameters - # - use "-m pip" as module pip to install PySide2 and argument + # - use "-m pip" as module pip to install qt binding and argument # "--ignore-installed" is to force install module to blender's # site-packages and make sure it is binary compatible + # TODO find out if blender 4.x on linux/darwin does install + # qt binding to correct place. args = [ python_executable, "-m", "pip", "install", "--ignore-installed", - "PySide2", + qt_binding, ] process = subprocess.Popen( args, stdout=subprocess.PIPE, universal_newlines=True @@ -203,13 +265,15 @@ class InstallPySideToBlender(PreLaunchHook): except subprocess.SubprocessError: pass - def is_pyside_installed(self, python_executable): + def is_pyside_installed(self, python_executable, qt_binding): """Check if PySide2 module is in blender's pip list. Check that PySide2 is installed directly in blender's site-packages. It is possible that it is installed in user's site-packages but that may be incompatible with blender's python. """ + + qt_binding_low = qt_binding.lower() # Get pip list from blender's python executable args = [python_executable, "-m", "pip", "list"] process = subprocess.Popen(args, stdout=subprocess.PIPE) @@ -226,6 +290,6 @@ class InstallPySideToBlender(PreLaunchHook): if not line: continue package_name = line[0:package_len].strip() - if package_name.lower() == "pyside2": + if package_name.lower() == qt_binding_low: return True return False diff --git a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py index e3a8593cd9..47303a7af4 100644 --- a/client/ayon_core/hosts/blender/hooks/pre_windows_console.py +++ b/client/ayon_core/hosts/blender/hooks/pre_windows_console.py @@ -1,5 +1,5 @@ import subprocess -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class BlenderConsoleWindows(PreLaunchHook): diff --git a/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py b/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py index 65a5a4a9b6..613574eee0 100644 --- a/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/blender/plugins/create/convert_legacy.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- """Converter for legacy Houdini products.""" -from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin +from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin from ayon_core.hosts.blender.api.lib import imprint -class BlenderLegacyConvertor(SubsetConvertorPlugin): +class BlenderLegacyConvertor(ProductConvertorPlugin): """Find and convert any legacy products in the scene. 
This Converter will find all legacy products in the scene and will diff --git a/client/ayon_core/hosts/blender/plugins/create/create_workfile.py b/client/ayon_core/hosts/blender/plugins/create/create_workfile.py index ead3ed7749..296a03e317 100644 --- a/client/ayon_core/hosts/blender/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/blender/plugins/create/create_workfile.py @@ -1,7 +1,7 @@ import bpy +import ayon_api from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_core.client import get_asset_by_name from ayon_core.hosts.blender.api.plugin import BaseCreator from ayon_core.hosts.blender.api.pipeline import ( AVALON_PROPERTY, @@ -33,33 +33,38 @@ class CreateWorkfile(BaseCreator, AutoCreator): ) project_name = self.project_name - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name - existing_asset_name = None + existing_folder_path = None if workfile_instance is not None: - existing_asset_name = workfile_instance.get("folderPath") + existing_folder_path = workfile_instance.get("folderPath") if not workfile_instance: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, task_name, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": task_name, } data.update( self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, task_name, host_name, workfile_instance, @@ -72,20 +77,25 @@ class CreateWorkfile(BaseCreator, AutoCreator): self._add_instance_to_context(workfile_instance) elif ( - existing_asset_name != asset_name + existing_folder_path != folder_path or workfile_instance["task"] != task_name ): # Update instance context if it's different - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) - workfile_instance["folderPath"] = asset_name + workfile_instance["folderPath"] = folder_path workfile_instance["task"] = task_name workfile_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py index 5a801da848..d2e58c7752 100644 --- a/client/ayon_core/hosts/blender/plugins/load/import_workfile.py +++ b/client/ayon_core/hosts/blender/plugins/load/import_workfile.py @@ -4,8 +4,8 @@ from ayon_core.hosts.blender.api import plugin def append_workfile(context, fname, do_import): - folder_name = context['asset']['name'] - product_name = context['subset']['name'] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] group_name = plugin.prepare_scene_name(folder_name, product_name) @@ -43,8 +43,8 @@ class AppendBlendLoader(plugin.AssetLoader): so you could also use it as a new base. 
""" - representations = ["blend"] - families = ["workfile"] + representations = {"blend"} + product_types = {"workfile"} label = "Append Workfile" order = 9 @@ -68,8 +68,8 @@ class ImportBlendLoader(plugin.AssetLoader): so you could also use it as a new base. """ - representations = ["blend"] - families = ["workfile"] + representations = {"blend"} + product_types = {"workfile"} label = "Import Workfile" order = 9 diff --git a/client/ayon_core/hosts/blender/plugins/load/load_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_abc.py index 4fa9881376..c074b5ed13 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_abc.py @@ -26,8 +26,8 @@ class CacheModelLoader(plugin.AssetLoader): Note: At least for now it only supports Alembic files. """ - families = ["model", "pointcache", "animation"] - representations = ["abc"] + product_types = {"model", "pointcache", "animation"} + representations = {"abc"} label = "Load Alembic" icon = "code-fork" @@ -134,8 +134,8 @@ class CacheModelLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -161,17 +161,17 @@ class CacheModelLoader(plugin.AssetLoader): self._link_objects(objects, asset_group, containers, asset_group) - product_type = context["subset"]["data"]["family"] + product_type = context["product"]["productType"] asset_group[AVALON_PROPERTY] = { "schema": "openpype:container-2.0", "id": AVALON_CONTAINER_ID, "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), + "parent": context["representation"]["versionId"], "productType": product_type, "objectName": group_name } @@ -179,7 +179,7 @@ class CacheModelLoader(plugin.AssetLoader): self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -191,15 +191,16 @@ class CacheModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -244,7 +245,7 @@ class CacheModelLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = repre_entity["id"] def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_action.py b/client/ayon_core/hosts/blender/plugins/load/load_action.py index 61ea996b2a..8135df042a 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_action.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_action.py @@ -24,8 +24,8 @@ class BlendActionLoader(plugin.AssetLoader): moment. """ - families = ["action"] - representations = ["blend"] + product_types = {"action"} + representations = {"blend"} label = "Link Action" icon = "code-fork" @@ -44,8 +44,8 @@ class BlendActionLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] lib_container = plugin.prepare_scene_name(folder_name, product_name) container_name = plugin.prepare_scene_name( folder_name, product_name, namespace @@ -114,7 +114,7 @@ class BlendActionLoader(plugin.AssetLoader): self[:] = nodes return nodes - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -126,18 +126,18 @@ class BlendActionLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ - + repre_entity = context["representation"] collection = bpy.data.collections.get( container["objectName"] ) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() logger.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert collection, ( @@ -241,7 +241,7 @@ class BlendActionLoader(plugin.AssetLoader): # Save the list of objects in the metadata container collection_metadata["objects"] = objects_list collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + collection_metadata["representation"] = repre_entity["id"] bpy.ops.object.select_all(action='DESELECT') diff --git a/client/ayon_core/hosts/blender/plugins/load/load_animation.py b/client/ayon_core/hosts/blender/plugins/load/load_animation.py index b805790c28..c9f3b33a6f 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_animation.py @@ -16,8 +16,8 @@ class BlendAnimationLoader(plugin.AssetLoader): moment. 
""" - families = ["animation"] - representations = ["blend"] + product_types = {"animation"} + representations = {"blend"} label = "Link Animation" icon = "code-fork" diff --git a/client/ayon_core/hosts/blender/plugins/load/load_audio.py b/client/ayon_core/hosts/blender/plugins/load/load_audio.py index 023a987d63..3d2f412e2b 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_audio.py @@ -20,8 +20,8 @@ from ayon_core.hosts.blender.api.pipeline import ( class AudioLoader(plugin.AssetLoader): """Load audio in Blender.""" - families = ["audio"] - representations = ["wav"] + product_types = {"audio"} + representations = {"wav"} label = "Load Audio" icon = "volume-up" @@ -39,8 +39,8 @@ class AudioLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -83,11 +83,11 @@ class AudioLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name, "audio": audio } @@ -96,7 +96,7 @@ class AudioLoader(plugin.AssetLoader): self[:] = objects return [objects] - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update an audio strip in the sequence editor. Arguments: @@ -105,14 +105,15 @@ class AudioLoader(plugin.AssetLoader): representation (openpype:representation-1.0): Representation to update, from `host.ls()`. 
""" + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -175,8 +176,8 @@ class AudioLoader(plugin.AssetLoader): window_manager.windows[-1].screen.areas[0].type = old_type metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) - metadata["parent"] = str(representation["parent"]) + metadata["representation"] = repre_entity["id"] + metadata["parent"] = repre_entity["versionId"] metadata["audio"] = new_audio def exec_remove(self, container: Dict) -> bool: diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blend.py b/client/ayon_core/hosts/blender/plugins/load/load_blend.py index 84a4bd4398..f9377d615c 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blend.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blend.py @@ -20,8 +20,8 @@ from ayon_core.hosts.blender.api.pipeline import ( class BlendLoader(plugin.AssetLoader): """Load assets from a .blend file.""" - families = ["model", "rig", "layout", "camera"] - representations = ["blend"] + product_types = {"model", "rig", "layout", "camera"} + representations = {"blend"} label = "Append Blend" icon = "code-fork" @@ -127,15 +127,15 @@ class BlendLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] try: - product_type = context["subset"]["data"]["family"] + product_type = context["product"]["productType"] except ValueError: product_type = "model" - representation = str(context["representation"]["_id"]) + representation = context["representation"]["id"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -162,11 +162,11 @@ class BlendLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name, "members": members, } @@ -181,13 +181,14 @@ class BlendLoader(plugin.AssetLoader): self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """ Update the loaded asset. 
""" + repre_entity = context["representation"] group_name = container["objectName"] asset_group = bpy.data.objects.get(group_name) - libpath = Path(get_representation_path(representation)).as_posix() + libpath = Path(get_representation_path(repre_entity)).as_posix() assert asset_group, ( f"The asset is not loaded: {container['objectName']}" @@ -226,7 +227,7 @@ class BlendLoader(plugin.AssetLoader): obj.animation_data_create() obj.animation_data.action = actions[obj.name] - # Restore the old data, but reset memebers, as they don't exist anymore + # Restore the old data, but reset members, as they don't exist anymore # This avoids a crash, because the memory addresses of those members # are not valid anymore old_data["members"] = [] @@ -234,8 +235,8 @@ class BlendLoader(plugin.AssetLoader): new_data = { "libpath": libpath, - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], "members": members, } diff --git a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py index ed9dcdeb09..f91d828d83 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_blendscene.py @@ -18,8 +18,8 @@ from ayon_core.hosts.blender.api.pipeline import ( class BlendSceneLoader(plugin.AssetLoader): """Load assets from a .blend file.""" - families = ["blendScene"] - representations = ["blend"] + product_types = {"blendScene"} + representations = {"blend"} label = "Append Blend" icon = "code-fork" @@ -82,11 +82,11 @@ class BlendSceneLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] try: - product_type = context["subset"]["data"]["family"] + product_type = context["product"]["productType"] except ValueError: product_type = "model" @@ -114,11 +114,11 @@ class BlendSceneLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name, "members": members, } @@ -133,13 +133,14 @@ class BlendSceneLoader(plugin.AssetLoader): self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """ Update the loaded asset. 
""" + repre_entity = context["representation"] group_name = container["objectName"] asset_group = bpy.data.collections.get(group_name) - libpath = Path(get_representation_path(representation)).as_posix() + libpath = Path(get_representation_path(repre_entity)).as_posix() assert asset_group, ( f"The asset is not loaded: {container['objectName']}" @@ -201,8 +202,8 @@ class BlendSceneLoader(plugin.AssetLoader): new_data = { "libpath": libpath, - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], "members": members, } diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py index 65c73b4168..6178578081 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_camera_abc.py @@ -23,8 +23,8 @@ class AbcCameraLoader(plugin.AssetLoader): Stores the imported asset in an empty named after the asset. """ - families = ["camera"] - representations = ["abc"] + product_types = {"camera"} + representations = {"abc"} label = "Load Camera (ABC)" icon = "code-fork" @@ -84,8 +84,8 @@ class AbcCameraLoader(plugin.AssetLoader): libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -119,18 +119,18 @@ class AbcCameraLoader(plugin.AssetLoader): "name": name, "namespace": namespace or "", "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name, } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -142,15 +142,16 @@ class AbcCameraLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -185,7 +186,7 @@ class AbcCameraLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = repre_entity["id"] def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py index 3e5a4e6e75..a510d42850 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_camera_fbx.py @@ -23,8 +23,8 @@ class FbxCameraLoader(plugin.AssetLoader): Stores the imported asset in an empty named after the asset. """ - families = ["camera"] - representations = ["fbx"] + product_types = {"camera"} + representations = {"fbx"} label = "Load Camera (FBX)" icon = "code-fork" @@ -87,8 +87,8 @@ class FbxCameraLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -122,18 +122,18 @@ class FbxCameraLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -145,15 +145,16 @@ class FbxCameraLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -195,7 +196,7 @@ class FbxCameraLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = repre_entity["id"] def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. diff --git a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py index e9d5522568..e323d49dea 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_fbx.py @@ -23,8 +23,8 @@ class FbxModelLoader(plugin.AssetLoader): Stores the imported asset in an empty named after the asset. 
""" - families = ["model", "rig"] - representations = ["fbx"] + product_types = {"model", "rig"} + representations = {"fbx"} label = "Load FBX" icon = "code-fork" @@ -131,8 +131,8 @@ class FbxModelLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -166,18 +166,18 @@ class FbxModelLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name } self[:] = objects return objects - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -189,15 +189,16 @@ class FbxModelLoader(plugin.AssetLoader): Warning: No nested collections are supported at the moment! """ + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -250,7 +251,7 @@ class FbxModelLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = repre_entity["id"] def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py index 126291464b..d20eaad9fc 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_layout_json.py @@ -26,8 +26,8 @@ from ayon_core.hosts.blender.api import plugin class JsonLayoutLoader(plugin.AssetLoader): """Load layout published from Unreal.""" - families = ["layout"] - representations = ["json"] + product_types = {"layout"} + representations = {"json"} label = "Load Layout" icon = "code-fork" @@ -132,7 +132,7 @@ class JsonLayoutLoader(plugin.AssetLoader): # # name=f"{unique_number}_{product[name]}_animation", # asset=asset, # options={"useSelection": False} - # # data={"dependencies": str(context["representation"]["_id"])} + # # data={"dependencies": context["representation"]["id"]} # ) def process_asset(self, @@ -148,8 +148,8 @@ class JsonLayoutLoader(plugin.AssetLoader): options: Additional settings dictionary """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] asset_name = plugin.prepare_scene_name(folder_name, product_name) unique_number = plugin.get_unique_number(folder_name, product_name) @@ -167,7 +167,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.empty_display_type = 'SINGLE_ARROW' avalon_container.objects.link(asset_group) - self._process(libpath, asset, asset_group, None) + self._process(libpath, asset_name, asset_group, None) bpy.context.scene.collection.objects.link(asset_group) @@ -177,18 +177,18 @@ class JsonLayoutLoader(plugin.AssetLoader): "name": name, "namespace": namespace or '', "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "libpath": libpath, "asset_name": asset_name, - "parent": str(context["representation"]["parent"]), - "productType": context["subset"]["data"]["family"], + "parent": context["representation"]["versionId"], + "productType": context["product"]["productType"], "objectName": group_name } self[:] = asset_group.children return asset_group.children - def exec_update(self, container: Dict, representation: Dict): + def exec_update(self, container: Dict, context: Dict): """Update the loaded asset. This will remove all objects of the current collection, load the new @@ -197,15 +197,16 @@ class JsonLayoutLoader(plugin.AssetLoader): will not be removed, only unlinked. Normally this should not be the case though. """ + repre_entity = context["representation"] object_name = container["objectName"] asset_group = bpy.data.objects.get(object_name) - libpath = Path(get_representation_path(representation)) + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert asset_group, ( @@ -269,7 +270,7 @@ class JsonLayoutLoader(plugin.AssetLoader): asset_group.matrix_basis = mat metadata["libpath"] = str(libpath) - metadata["representation"] = str(representation["_id"]) + metadata["representation"] = repre_entity["id"] def exec_remove(self, container: Dict) -> bool: """Remove an existing container from a Blender scene. 
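For reference, the container metadata imprinted on load is near-identical across these loaders and now stores plain string ids. The consolidated shape, copied from the assignments above (the right-hand names are the surrounding local variables):

    asset_group[AVALON_PROPERTY] = {
        "schema": "openpype:container-2.0",
        "id": AVALON_CONTAINER_ID,
        "name": name,
        "namespace": namespace or "",
        "loader": str(self.__class__.__name__),
        "representation": context["representation"]["id"],
        "libpath": libpath,
        "asset_name": asset_name,
        "parent": context["representation"]["versionId"],  # version id
        "productType": context["product"]["productType"],
        "objectName": group_name,
    }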
diff --git a/client/ayon_core/hosts/blender/plugins/load/load_look.py b/client/ayon_core/hosts/blender/plugins/load/load_look.py index 27632f5705..75401f94ec 100644 --- a/client/ayon_core/hosts/blender/plugins/load/load_look.py +++ b/client/ayon_core/hosts/blender/plugins/load/load_look.py @@ -23,8 +23,8 @@ class BlendLookLoader(plugin.AssetLoader): contains the model. There is no further need to 'containerise' it. """ - families = ["look"] - representations = ["json"] + product_types = {"look"} + representations = {"json"} label = "Load Look" icon = "code-fork" @@ -93,8 +93,8 @@ class BlendLookLoader(plugin.AssetLoader): """ libpath = self.filepath_from_context(context) - folder_name = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] lib_container = plugin.prepare_scene_name( folder_name, product_name @@ -130,23 +130,24 @@ class BlendLookLoader(plugin.AssetLoader): metadata["objects"] = objects metadata["materials"] = materials - metadata["parent"] = str(context["representation"]["parent"]) - metadata["product_type"] = context["subset"]["data"]["family"] + metadata["parent"] = context["representation"]["versionId"] + metadata["product_type"] = context["product"]["productType"] nodes = list(container.objects) nodes.append(container) self[:] = nodes return nodes - def update(self, container: Dict, representation: Dict): + def update(self, container: Dict, context: Dict): collection = bpy.data.collections.get(container["objectName"]) - libpath = Path(get_representation_path(representation)) + repre_entity = context["representation"] + libpath = Path(get_representation_path(repre_entity)) extension = libpath.suffix.lower() self.log.info( "Container: %s\nRepresentation: %s", pformat(container, indent=2), - pformat(representation, indent=2), + pformat(repre_entity, indent=2), ) assert collection, ( @@ -201,7 +202,7 @@ class BlendLookLoader(plugin.AssetLoader): collection_metadata["objects"] = objects collection_metadata["materials"] = materials collection_metadata["libpath"] = str(libpath) - collection_metadata["representation"] = str(representation["_id"]) + collection_metadata["representation"] = repre_entity["id"] def remove(self, container: Dict) -> bool: collection = bpy.data.collections.get(container["objectName"]) diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py index cf753637ea..6590be515c 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_abc.py @@ -2,6 +2,7 @@ import os import bpy +from ayon_core.lib import BoolDef from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin @@ -17,9 +18,11 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): if not self.is_active(instance.data): return + attr_values = self.get_attr_values_from_data(instance.data) + # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" @@ -46,7 +49,8 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin): bpy.ops.wm.alembic_export( filepath=filepath, selected=True, - flatten=False + flatten=False, + 
subdiv_schema=attr_values.get("subdiv_schema", False) ) plugin.deselect_all() @@ -65,6 +69,21 @@ self.log.debug("Extracted instance '%s' to: %s", instance.name, representation) + @classmethod + def get_attribute_defs(cls): + return [ + BoolDef( + "subdiv_schema", + label="Alembic Mesh Subdiv Schema", + tooltip="Export Meshes using Alembic's subdivision schema.\n" + "Enabling this includes creases with the export but " + "excludes the mesh's normals.\n" + "Enabling this usually results in a smaller file size " + "due to the lack of normals.", + default=False + ) + ] + class ExtractModelABC(ExtractABC): """Extract model as ABC.""" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py index 0086dccd67..f33af13282 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_abc_animation.py @@ -23,7 +23,7 @@ class ExtractAnimationABC( # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py b/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py index dd2e33df80..19fe9c6271 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_blend.py @@ -13,6 +13,9 @@ class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin): families = ["model", "camera", "rig", "action", "layout", "blendScene"] optional = True + # From settings + compress = False + def process(self, instance): if not self.is_active(instance.data): return @@ -20,7 +23,7 @@ # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.blend" @@ -53,7 +56,7 @@ if node.image and node.image.packed_file is None: node.image.pack() - bpy.data.libraries.write(filepath, data_blocks) + bpy.data.libraries.write(filepath, data_blocks, compress=self.compress) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py index da663b46ea..315fbb19af 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_blend_animation.py @@ -16,6 +16,9 @@ class ExtractBlendAnimation( families = ["animation"] optional = True + # From settings + compress = False + def process(self, instance): if not self.is_active(instance.data): return @@ -23,7 +26,7 @@ # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name =
instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.blend" @@ -46,7 +49,7 @@ class ExtractBlendAnimation( data_blocks.add(child.animation_data.action) data_blocks.add(obj) - bpy.data.libraries.write(filepath, data_blocks) + bpy.data.libraries.write(filepath, data_blocks, compress=self.compress) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py index ff14d70696..c60c92dee1 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_abc.py @@ -4,7 +4,6 @@ import bpy from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin -from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin): @@ -21,7 +20,7 @@ class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.abc" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py index 03059f1e13..bcaf9ebc44 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_camera_fbx.py @@ -20,7 +20,7 @@ class ExtractCamera(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.fbx" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py index 8fea077e7c..e6367dbc0d 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx.py @@ -4,7 +4,6 @@ import bpy from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin -from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin): @@ -21,7 +20,7 @@ class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin): # Define extract output file path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" filename = f"{instance_name}.fbx" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py index b98167c741..ae02909152 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_fbx_animation.py @@ -145,7 +145,7 
@@ class ExtractAnimationFBX( root.select_set(True) armature.select_set(True) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" fbx_filename = f"{instance_name}_{armature.name}.fbx" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py b/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py index 16c0392070..0679483dd5 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_layout.py @@ -5,7 +5,8 @@ import bpy import bpy_extras import bpy_extras.anim_utils -from ayon_core.client import get_representation_by_name +from ayon_api import get_representations + from ayon_core.pipeline import publish from ayon_core.hosts.blender.api import plugin from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY @@ -134,6 +135,8 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): fbx_count = 0 project_name = instance.context.data["projectName"] + version_ids = set() + filtered_assets = [] for asset in asset_group.children: metadata = asset.get(AVALON_PROPERTY) if not metadata: @@ -146,42 +149,47 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): ) continue + filtered_assets.append((asset, metadata)) + version_ids.add(metadata["parent"]) + + repre_entities = get_representations( + project_name, + representation_names={"blend", "fbx", "abc"}, + version_ids=version_ids, + fields={"id", "versionId", "name"} + ) + repre_mapping_by_version_id = { + version_id: {} + for version_id in version_ids + } + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + repre_mapping_by_version_id[version_id][repre_entity["name"]] = ( + repre_entity + ) + + for asset, metadata in filtered_assets: version_id = metadata["parent"] product_type = metadata.get("product_type") if product_type is None: product_type = metadata["family"] + repres_by_name = repre_mapping_by_version_id[version_id] + self.log.debug("Parent: {}".format(version_id)) - # Get blend reference - blend = get_representation_by_name( - project_name, "blend", version_id, fields=["_id"] - ) - blend_id = None - if blend: - blend_id = blend["_id"] - # Get fbx reference - fbx = get_representation_by_name( - project_name, "fbx", version_id, fields=["_id"] - ) - fbx_id = None - if fbx: - fbx_id = fbx["_id"] - # Get abc reference - abc = get_representation_by_name( - project_name, "abc", version_id, fields=["_id"] - ) - abc_id = None - if abc: - abc_id = abc["_id"] - - json_element = {} - if blend_id: - json_element["reference"] = str(blend_id) - if fbx_id: - json_element["reference_fbx"] = str(fbx_id) - if abc_id: - json_element["reference_abc"] = str(abc_id) - + # Get blend, fbx and abc reference + blend_id = repres_by_name.get("blend", {}).get("id") + fbx_id = repres_by_name.get("fbx", {}).get("id") + abc_id = repres_by_name.get("abc", {}).get("id") + json_element = { + key: value + for key, value in ( + ("reference", blend_id), + ("reference_fbx", fbx_id), + ("reference_abc", abc_id), + ) + if value + } json_element["product_type"] = product_type json_element["instance_name"] = asset.name json_element["asset_name"] = metadata["asset_name"] @@ -228,7 +236,7 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin): json_data.append(json_element) - folder_name = instance.data["assetEntity"]["name"] + 
folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] instance_name = f"{folder_name}_{product_name}" json_filename = f"{instance_name}.json" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py b/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py index acb09d0d77..ce6f40f967 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_playblast.py @@ -55,7 +55,7 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin): # get output path stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] filename = f"{folder_name}_{product_name}" diff --git a/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py index 89168fb9c9..4330c57d99 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/hosts/blender/plugins/publish/extract_thumbnail.py @@ -32,7 +32,7 @@ class ExtractThumbnail(publish.Extractor): return stagingdir = self.staging_dir(instance) - folder_name = instance.data["assetEntity"]["name"] + folder_name = instance.data["folderEntity"]["name"] product_name = instance.data["productName"] filename = f"{folder_name}_{product_name}" diff --git a/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py b/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py index a10144ebf5..5d3a1dac93 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py +++ b/client/ayon_core/hosts/blender/plugins/publish/integrate_animation.py @@ -44,7 +44,7 @@ class IntegrateAnimation( break if not rep: continue - obj_id = rep["representation"]["_id"] + obj_id = rep["representation"]["id"] if obj_id: json_dict["representation_id"] = str(obj_id) diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py index b37db44cd4..a86e73ba81 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_deadline_publish.py @@ -32,7 +32,7 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin, tree = bpy.context.scene.node_tree output_type = "CompositorNodeOutputFile" output_node = None - # Remove all output nodes that inlcude "AYON" in the name. + # Remove all output nodes that include "AYON" in the name. # There should be only one. for node in tree.nodes: if node.bl_idname == output_type and "AYON" in node.name: diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py b/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py index 6a053eb47b..aa73525555 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_file_saved.py @@ -37,7 +37,8 @@ class ValidateFileSaved(pyblish.api.ContextPlugin, if not context.data["currentFile"]: # File has not been saved at all and has no filename raise PublishValidationError( - "Current file is empty. Save the file before continuing." + "Current workfile has not been saved yet.\n" + "Save the workfile before continuing." 
) # Do not validate workfile has unsaved changes if only instances diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py index 63b7dc7530..fb16bb7f8d 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_mesh_no_negative_scale.py @@ -12,7 +12,7 @@ from ayon_core.pipeline.publish import ( import ayon_core.hosts.blender.api.action -class ValidateMeshNoNegativeScale(pyblish.api.Validator, +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale.""" diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_model_uv_map1.py b/client/ayon_core/hosts/blender/plugins/publish/validate_model_uv_map1.py new file mode 100644 index 0000000000..752bc5fa58 --- /dev/null +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_model_uv_map1.py @@ -0,0 +1,94 @@ +import inspect +from typing import List + +import bpy + +import pyblish.api + +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError, + RepairAction +) +import ayon_core.hosts.blender.api.action + + +class ValidateModelMeshUvMap1( + pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin, +): + """Validate model mesh uvs are named `map1`. + + This is solely to get them to work nicely for the Maya pipeline. + """ + + order = ValidateContentsOrder + hosts = ["blender"] + families = ["model"] + label = "Mesh UVs named map1" + actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction, + RepairAction] + optional = True + enabled = False + + @classmethod + def get_invalid(cls, instance) -> List: + + invalid = [] + for obj in instance: + if obj.mode != "OBJECT": + cls.log.warning( + f"Mesh object {obj.name} should be in 'OBJECT' mode" + " to be properly checked." + ) + + obj_data = obj.data + if isinstance(obj_data, bpy.types.Mesh): + mesh = obj_data + + # Ignore mesh without UVs + if not mesh.uv_layers: + continue + + # If mesh has map1 all is ok + if mesh.uv_layers.get("map1"): + continue + + cls.log.warning( + f"Mesh object {obj.name} has no UV set" + " named 'map1'." + ) + invalid.append(obj) + + return invalid + + @classmethod + def repair(cls, instance): + for obj in cls.get_invalid(instance): + mesh = obj.data + + # Rename the first UV set to map1 + mesh.uv_layers[0].name = "map1" + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + f"Meshes found in instance without valid UVs: {invalid}", + description=self.get_description() + ) + + def get_description(self): + return inspect.cleandoc( + """## Meshes must have map1 uv set + + To accompany a better Maya-focused pipeline with Alembics it is + expected that a Mesh has a `map1` UV set. Blender defaults to + a UV set named `UVMap`, which thus needs to be renamed.
+ + """ + ) diff --git a/client/ayon_core/hosts/blender/plugins/publish/validate_transform_zero.py b/client/ayon_core/hosts/blender/plugins/publish/validate_transform_zero.py index 267eff47e4..465ec15d7b 100644 --- a/client/ayon_core/hosts/blender/plugins/publish/validate_transform_zero.py +++ b/client/ayon_core/hosts/blender/plugins/publish/validate_transform_zero.py @@ -1,3 +1,4 @@ +import inspect from typing import List import mathutils @@ -5,29 +6,26 @@ import bpy import pyblish.api +from ayon_core.hosts.blender.api import plugin, lib import ayon_core.hosts.blender.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, OptionalPyblishPluginMixin, - PublishValidationError + PublishValidationError, + RepairAction ) class ValidateTransformZero(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): - """Transforms can't have any values - - To solve this issue, try freezing the transforms. So long - as the transforms, rotation and scale values are zero, - you're all good. - - """ + """Transforms can't have any values""" order = ValidateContentsOrder hosts = ["blender"] families = ["model"] label = "Transform Zero" - actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction] + actions = [ayon_core.hosts.blender.api.action.SelectInvalidAction, + RepairAction] _identity = mathutils.Matrix() @@ -51,5 +49,46 @@ class ValidateTransformZero(pyblish.api.InstancePlugin, names = ", ".join(obj.name for obj in invalid) raise PublishValidationError( "Objects found in instance which do not" - f" have transform set to zero: {names}" + f" have transform set to zero: {names}", + description=self.get_description() ) + + @classmethod + def repair(cls, instance): + + invalid = cls.get_invalid(instance) + if not invalid: + return + + context = plugin.create_blender_context( + active=invalid[0], selected=invalid + ) + with lib.maintained_selection(): + with bpy.context.temp_override(**context): + plugin.deselect_all() + for obj in invalid: + obj.select_set(True) + + # TODO: Preferably this does allow custom pivot point locations + # and if so, this should likely apply to the delta instead + # using `bpy.ops.object.transforms_to_deltas(mode="ALL")` + bpy.ops.object.transform_apply(location=True, + rotation=True, + scale=True) + + def get_description(self): + return inspect.cleandoc( + """## Transforms can't have any values. + + The location, rotation and scale on the transform must be at + the default values. This also goes for the delta transforms. + + To solve this issue, try freezing the transforms: + - `Object` > `Apply` > `All Transforms` + + Using the Repair action directly will do the same. + + So long as the transforms, rotation and scale values are zero, + you're all good. 
+ """ + ) diff --git a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py index bf1b4937cd..8350c7b7c8 100644 --- a/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py +++ b/client/ayon_core/hosts/celaction/hooks/pre_celaction_setup.py @@ -3,7 +3,7 @@ import shutil import winreg import subprocess from ayon_core.lib import get_ayon_launcher_args -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.celaction import CELACTION_ROOT_DIR @@ -16,9 +16,9 @@ class CelactionPrelaunchHook(PreLaunchHook): launch_types = {LaunchTypes.local} def execute(self): - asset_doc = self.data["asset_doc"] - width = asset_doc["data"]["resolutionWidth"] - height = asset_doc["data"]["resolutionHeight"] + folder_attributes = self.data["folder_entity"]["attrib"] + width = folder_attributes["resolutionWidth"] + height = folder_attributes["resolutionHeight"] # Add workfile path to launch arguments workfile_path = self.workfile_path() @@ -118,7 +118,7 @@ class CelactionPrelaunchHook(PreLaunchHook): def workfile_path(self): workfile_path = self.data["last_workfile_path"] - # copy workfile from template if doesnt exist any on path + # copy workfile from template if doesn't exist any on path if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 54dea15dff..1820569918 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -3,11 +3,11 @@ import sys from pprint import pformat -class CollectCelactionCliKwargs(pyblish.api.Collector): +class CollectCelactionCliKwargs(pyblish.api.ContextPlugin): """ Collects all keyword arguments passed from the terminal """ label = "Collect Celaction Cli Kwargs" - order = pyblish.api.Collector.order - 0.1 + order = pyblish.api.CollectorOrder - 0.1 def process(self, context): args = list(sys.argv[1:]) diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py index 4306a53bfe..7c22201e3e 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -1,8 +1,6 @@ import os import pyblish.api -from ayon_core.client import get_asset_name_identifier - class CollectCelactionInstances(pyblish.api.ContextPlugin): """ Adds the celaction render instances """ @@ -16,24 +14,20 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): staging_dir = os.path.dirname(current_file) scene_file = os.path.basename(current_file) version = context.data["version"] - asset_entity = context.data["assetEntity"] - project_entity = context.data["projectEntity"] - asset_name = get_asset_name_identifier(asset_entity) + folder_entity = context.data["folderEntity"] + + folder_attributes = folder_entity["attrib"] shared_instance_data = { - "folderPath": asset_name, - "frameStart": asset_entity["data"]["frameStart"], - "frameEnd": asset_entity["data"]["frameEnd"], - "handleStart": asset_entity["data"]["handleStart"], - "handleEnd": 
asset_entity["data"]["handleEnd"], - "fps": asset_entity["data"]["fps"], - "resolutionWidth": asset_entity["data"].get( - "resolutionWidth", - project_entity["data"]["resolutionWidth"]), - "resolutionHeight": asset_entity["data"].get( - "resolutionHeight", - project_entity["data"]["resolutionHeight"]), + "folderPath": folder_entity["path"], + "frameStart": folder_attributes["frameStart"], + "frameEnd": folder_attributes["frameEnd"], + "handleStart": folder_attributes["handleStart"], + "handleEnd": folder_attributes["handleEnd"], + "fps": folder_attributes["fps"], + "resolutionWidth": folder_attributes["resolutionWidth"], + "resolutionHeight": folder_attributes["resolutionHeight"], "pixelAspect": 1, "step": 1, "version": version @@ -83,7 +77,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): # getting instance state instance.data["publish"] = True - # add assetEntity data into instance + # add folderEntity data into instance instance.data.update({ "label": "{} - farm".format(product_name), "productType": product_type, diff --git a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py index abe670b691..1bb4d54831 100644 --- a/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py +++ b/client/ayon_core/hosts/celaction/plugins/publish/collect_render_path.py @@ -18,7 +18,7 @@ class CollectRenderPath(pyblish.api.InstancePlugin): def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - padding = anatomy.templates.get("frame_padding", 4) + padding = anatomy.templates_obj.frame_padding product_type = "render" anatomy_data.update({ "frame": f"%0{padding}d", @@ -28,18 +28,17 @@ class CollectRenderPath(pyblish.api.InstancePlugin): }) anatomy_data["product"]["type"] = product_type - anatomy_filled = anatomy.format(anatomy_data) - # get anatomy rendering keys r_anatomy_key = self.anatomy_template_key_render_files m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[r_anatomy_key]["folder"] - render_path = anatomy_filled[r_anatomy_key]["path"] + r_template_item = anatomy.get_template_item("publish", r_anatomy_key) + render_dir = r_template_item["directory"].format_strict(anatomy_data) + render_path = r_template_item["path"].format_strict(anatomy_data) self.log.debug("__ render_path: `{}`".format(render_path)) - # create dir if it doesnt exists + # create dir if it doesn't exists try: if not os.path.isdir(render_dir): os.makedirs(render_dir, exist_ok=True) @@ -51,11 +50,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(m_anatomy_key): - instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - m_anatomy_key]["folder"] - self.log.info("Metadata render path: `{}`".format( - instance.data["publishRenderMetadataFolder"] - )) + m_template_item = anatomy.get_template_item( + "publish", m_anatomy_key, default=None + ) + if m_template_item is not None: + metadata_path = m_template_item["directory"].format_strict( + anatomy_data + ) + instance.data["publishRenderMetadataFolder"] = metadata_path + self.log.info("Metadata render path: `{}`".format(metadata_path)) self.log.info(f"Render output path set to: `{render_path}`") diff --git a/client/ayon_core/hosts/flame/api/__init__.py 
b/client/ayon_core/hosts/flame/api/__init__.py index c00ee958b6..8fcf0c92b0 100644 --- a/client/ayon_core/hosts/flame/api/__init__.py +++ b/client/ayon_core/hosts/flame/api/__init__.py @@ -1,5 +1,5 @@ """ -OpenPype Autodesk Flame api +AYON Autodesk Flame api """ from .constants import ( COLOR_MAP, @@ -23,7 +23,7 @@ from .lib import ( reset_segment_selection, get_segment_attributes, get_clips_in_reels, - get_reformated_filename, + get_reformatted_filename, get_frame_from_filename, get_padding_from_filename, maintained_object_duplication, @@ -101,7 +101,7 @@ __all__ = [ "reset_segment_selection", "get_segment_attributes", "get_clips_in_reels", - "get_reformated_filename", + "get_reformatted_filename", "get_frame_from_filename", "get_padding_from_filename", "maintained_object_duplication", diff --git a/client/ayon_core/hosts/flame/api/constants.py b/client/ayon_core/hosts/flame/api/constants.py index 1833031e13..04191c539d 100644 --- a/client/ayon_core/hosts/flame/api/constants.py +++ b/client/ayon_core/hosts/flame/api/constants.py @@ -1,14 +1,14 @@ """ -OpenPype Flame api constances +AYON Flame api constances """ -# OpenPype marker workflow variables +# AYON marker workflow variables MARKER_NAME = "OpenPypeData" MARKER_DURATION = 0 MARKER_COLOR = "cyan" MARKER_PUBLISH_DEFAULT = False -# OpenPype color definitions +# AYON color definitions COLOR_MAP = { "red": (1.0, 0.0, 0.0), "orange": (1.0, 0.5, 0.0), diff --git a/client/ayon_core/hosts/flame/api/lib.py b/client/ayon_core/hosts/flame/api/lib.py index efa23fe01e..8bfe6348ea 100644 --- a/client/ayon_core/hosts/flame/api/lib.py +++ b/client/ayon_core/hosts/flame/api/lib.py @@ -607,7 +607,7 @@ def get_clips_in_reels(project): return output_clips -def get_reformated_filename(filename, padded=True): +def get_reformatted_filename(filename, padded=True): """ Return fixed python expression path @@ -615,10 +615,10 @@ def get_reformated_filename(filename, padded=True): filename (str): file name Returns: - type: string with reformated path + type: string with reformatted path Example: - get_reformated_filename("plate.1001.exr") > plate.%04d.exr + get_reformatted_filename("plate.1001.exr") > plate.%04d.exr """ found = FRAME_PATTERN.search(filename) @@ -980,7 +980,7 @@ class MediaInfoFile(object): @property def file_pattern(self): - """Clips file patter + """Clips file pattern. Returns: str: file pattern. ex. 
file.[1-2].exr diff --git a/client/ayon_core/hosts/flame/api/pipeline.py b/client/ayon_core/hosts/flame/api/pipeline.py index 532f89b5e9..4578d7bb4b 100644 --- a/client/ayon_core/hosts/flame/api/pipeline.py +++ b/client/ayon_core/hosts/flame/api/pipeline.py @@ -38,12 +38,12 @@ def install(): pyblish.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - log.info("OpenPype Flame plug-ins registered ...") + log.info("AYON Flame plug-ins registered ...") # register callback for switching publishable pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) - log.info("OpenPype Flame host installed ...") + log.info("AYON Flame host installed ...") def uninstall(): @@ -57,7 +57,7 @@ def uninstall(): # register callback for switching publishable pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) - log.info("OpenPype Flame host uninstalled ...") + log.info("AYON Flame host uninstalled ...") def containerise(flame_clip_segment, @@ -73,7 +73,7 @@ def containerise(flame_clip_segment, "name": str(name), "namespace": str(namespace), "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } if data: diff --git a/client/ayon_core/hosts/flame/api/plugin.py b/client/ayon_core/hosts/flame/api/plugin.py index cf28a3cef3..e656f33052 100644 --- a/client/ayon_core/hosts/flame/api/plugin.py +++ b/client/ayon_core/hosts/flame/api/plugin.py @@ -38,7 +38,7 @@ class CreatorWidget(QtWidgets.QDialog): | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) - self.setWindowTitle(name or "Pype Creator Input") + self.setWindowTitle(name or "AYON Creator Input") self.resize(500, 700) # Where inputs and labels are set @@ -644,13 +644,13 @@ class PublishableClip: "families": [self.base_product_type, self.product_type] } - def _convert_to_entity(self, type, template): + def _convert_to_entity(self, src_type, template): """ Converting input key to key with type. 
""" # convert to entity type - entity_type = self.types.get(type, None) + folder_type = self.types.get(src_type, None) - assert entity_type, "Missing entity type for `{}`".format( - type + assert folder_type, "Missing folder type for `{}`".format( + src_type ) # first collect formatting data to use for formatting template @@ -661,7 +661,7 @@ class PublishableClip: formatting_data[_k] = value return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": template.format( **formatting_data ) @@ -748,18 +748,16 @@ class ClipLoader(LoaderPlugin): Returns: str: colorspace name or None """ - version = context['version'] - version_data = version.get("data", {}) - colorspace = version_data.get( - "colorspace", None - ) + version_entity = context["version"] + version_attributes = version_entity["attrib"] + colorspace = version_attributes.get("colorSpace") if ( not colorspace or colorspace == "Unknown" ): colorspace = context["representation"]["data"].get( - "colorspace", None) + "colorspace") return colorspace @@ -1020,7 +1018,7 @@ class OpenClipSolver(flib.MediaInfoFile): self.feed_version_name)) else: self.log.debug("adding new track element ..") - # create new track as it doesnt exists yet + # create new track as it doesn't exist yet # set current version to feeds on tmp tmp_xml_feeds = tmp_xml_track.find('feeds') tmp_xml_feeds.set('currentVersion', self.feed_version_name) diff --git a/client/ayon_core/hosts/flame/api/scripts/wiretap_com.py b/client/ayon_core/hosts/flame/api/scripts/wiretap_com.py index cffc6ec782..42b9257cbe 100644 --- a/client/ayon_core/hosts/flame/api/scripts/wiretap_com.py +++ b/client/ayon_core/hosts/flame/api/scripts/wiretap_com.py @@ -61,7 +61,7 @@ class WireTapCom(object): def get_launch_args( self, project_name, project_data, user_name, *args, **kwargs): - """Forming launch arguments for OpenPype launcher. + """Forming launch arguments for AYON launcher. Args: project_name (str): name of project diff --git a/client/ayon_core/hosts/flame/api/utils.py b/client/ayon_core/hosts/flame/api/utils.py index 91584456a6..b76dd92ada 100644 --- a/client/ayon_core/hosts/flame/api/utils.py +++ b/client/ayon_core/hosts/flame/api/utils.py @@ -11,7 +11,7 @@ log = Logger.get_logger(__name__) def _sync_utility_scripts(env=None): """ Synchronizing basic utlility scripts for flame. - To be able to run start OpenPype within Flame we have to copy + To be able to run start AYON within Flame we have to copy all utility_scripts and additional FLAME_SCRIPT_DIR into `/opt/Autodesk/shared/python`. This will be always synchronizing those folders. 
@@ -124,7 +124,7 @@ def setup(env=None): # synchronize resolve utility scripts _sync_utility_scripts(env) - log.info("Flame OpenPype wrapper has been installed") + log.info("Flame AYON wrapper has been installed") def get_flame_version(): diff --git a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py index 391332d368..77a9435205 100644 --- a/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py +++ b/client/ayon_core/hosts/flame/hooks/pre_flame_setup.py @@ -9,7 +9,7 @@ from ayon_core.lib import ( get_ayon_username, run_subprocess, ) -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts import flame as opflame @@ -36,8 +36,8 @@ class FlamePrelaunch(PreLaunchHook): self.flame_pythonpath = _env["AYON_FLAME_PYTHONPATH"] """Hook entry method.""" - project_doc = self.data["project_doc"] - project_name = project_doc["name"] + project_entity = self.data["project_entity"] + project_name = project_entity["name"] volume_name = _env.get("FLAME_WIRETAP_VOLUME") # get image io @@ -63,20 +63,22 @@ class FlamePrelaunch(PreLaunchHook): hostname = socket.gethostname() # not returning wiretap host name self.log.debug("Collected user \"{}\"".format(user_name)) - self.log.info(pformat(project_doc)) - _db_p_data = project_doc["data"] - width = _db_p_data["resolutionWidth"] - height = _db_p_data["resolutionHeight"] - fps = float(_db_p_data["fps"]) + self.log.info(pformat(project_entity)) + project_attribs = project_entity["attrib"] + width = project_attribs["resolutionWidth"] + height = project_attribs["resolutionHeight"] + fps = float(project_attribs["fps"]) project_data = { - "Name": project_doc["name"], - "Nickname": _db_p_data["code"], - "Description": "Created by OpenPype", - "SetupDir": project_doc["name"], + "Name": project_entity["name"], + "Nickname": project_entity["code"], + "Description": "Created by AYON", + "SetupDir": project_entity["name"], "FrameWidth": int(width), "FrameHeight": int(height), - "AspectRatio": float((width / height) * _db_p_data["pixelAspect"]), + "AspectRatio": float( + (width / height) * project_attribs["pixelAspect"] + ), "FrameRate": self._get_flame_fps(fps) } diff --git a/client/ayon_core/hosts/flame/otio/flame_export.py b/client/ayon_core/hosts/flame/otio/flame_export.py index e5ea4dcf5e..cb038f9e9a 100644 --- a/client/ayon_core/hosts/flame/otio/flame_export.py +++ b/client/ayon_core/hosts/flame/otio/flame_export.py @@ -256,7 +256,7 @@ def create_otio_reference(clip_data, fps=None): if not otio_ex_ref_item: dirname, file_name = os.path.split(path) - file_name = utils.get_reformated_filename(file_name, padded=False) + file_name = utils.get_reformatted_filename(file_name, padded=False) reformated_path = os.path.join(dirname, file_name) # in case old OTIO or video file create `ExternalReference` otio_ex_ref_item = otio.schema.ExternalReference( diff --git a/client/ayon_core/hosts/flame/otio/utils.py b/client/ayon_core/hosts/flame/otio/utils.py index 7ded8e55d8..5a28263fc2 100644 --- a/client/ayon_core/hosts/flame/otio/utils.py +++ b/client/ayon_core/hosts/flame/otio/utils.py @@ -21,7 +21,7 @@ def frames_to_seconds(frames, framerate): return otio.opentime.to_seconds(rt) -def get_reformated_filename(filename, padded=True): +def get_reformatted_filename(filename, padded=True): """ Return fixed python expression path @@ -29,10 +29,10 @@ def get_reformated_filename(filename, padded=True): filename (str): file name Returns: - type: 
string with reformated path + type: string with reformatted path Example: - get_reformated_filename("plate.1001.exr") > plate.%04d.exr + get_reformatted_filename("plate.1001.exr") > plate.%04d.exr """ found = FRAME_PATTERN.search(filename) diff --git a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py index c73ee7510c..56f5319f21 100644 --- a/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/flame/plugins/create/create_shot_clip.py @@ -17,7 +17,7 @@ class CreateShotClip(opfapi.Creator): presets = deepcopy(self.presets) gui_inputs = self.get_gui_inputs() - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed @@ -207,14 +207,14 @@ class CreateShotClip(opfapi.Creator): "value": ["[ track name ]", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "Subset Name", + "label": "Product Name", "target": "ui", "toolTip": "chose product name pattern, if [ track name ] is selected, name of track layer will be used", # noqa "order": 0}, "productType": { "value": ["plate", "take"], "type": "QComboBox", - "label": "Subset Family", + "label": "Product Type", "target": "ui", "toolTip": "What use of this product is for", # noqa "order": 1}, "reviewTrack": { @@ -236,7 +236,7 @@ class CreateShotClip(opfapi.Creator): "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, diff --git a/client/ayon_core/hosts/flame/plugins/load/load_clip.py b/client/ayon_core/hosts/flame/plugins/load/load_clip.py index 84f63b3177..40ab9c038b 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip.py @@ -17,8 +17,8 @@ class LoadClip(opfapi.ClipLoader): during conforming to project """ - families = ["render2d", "source", "plate", "render", "review"] - representations = ["*"] + product_types = {"render2d", "source", "plate", "render", "review"} + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -48,9 +48,9 @@ class LoadClip(opfapi.ClipLoader): self.fpd = fproject.current_workspace.desktop # load clip to timeline and get main variables - version = context['version'] - version_data = version.get("data", {}) - version_name = version.get("name", None) + version_entity = context["version"] + version_attributes = version_entity["attrib"] + version_name = version_entity["version"] colorspace = self.get_colorspace(context) # in case output is not in context replace key to representation @@ -112,11 +112,10 @@ class LoadClip(opfapi.ClipLoader): ] # move all version data keys to tag data - data_imprint = {} - for key in add_keys: - data_imprint.update({ - key: version_data.get(key, str(None)) - }) + data_imprint = { + key: version_attributes.get(key, str(None)) + for key in add_keys + } # add variables related to version context data_imprint.update({ @@ -180,27 +179,27 @@ class LoadClip(opfapi.ClipLoader): # unwrapping segment from input clip pass - # def switch(self, container, representation): - # self.update(container, representation) + # def switch(self, container, context): + # self.update(container, context) - # def update(self, 
container, representation): + # def update(self, container, context): # """ Updating previously loaded clips # """ - # # load clip to timeline and get main variables + # repre_entity = context['representation'] # name = container['name'] # namespace = container['namespace'] # track_item = phiero.get_track_items( # track_item_name=namespace) # version = io.find_one({ # "type": "version", - # "_id": representation["parent"] + # "id": repre_entity["versionId"] # }) # version_data = version.get("data", {}) # version_name = version.get("name", None) - # colorspace = version_data.get("colorspace", None) + # colorspace = version_data.get("colorSpace", None) # object_name = "{}_{}".format(name, namespace) - # file = get_representation_path(representation).replace("\\", "/") + # file = get_representation_path(repre_entity).replace("\\", "/") # clip = track_item.source() # # reconnect media to new path @@ -225,7 +224,7 @@ class LoadClip(opfapi.ClipLoader): # # add variables related to version context # data_imprint.update({ - # "representation": str(representation["_id"]), + # "representation": repre_entity["id"], # "version": version_name, # "colorspace": colorspace, # "objectName": object_name diff --git a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py index 9f81103cb4..1b23a8b465 100644 --- a/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py +++ b/client/ayon_core/hosts/flame/plugins/load/load_clip_batch.py @@ -16,8 +16,8 @@ class LoadClipBatch(opfapi.ClipLoader): during conforming to project """ - families = ["render2d", "source", "plate", "render", "review"] - representations = ["*"] + product_types = {"render2d", "source", "plate", "render", "review"} + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -45,9 +45,9 @@ class LoadClipBatch(opfapi.ClipLoader): self.batch = options.get("batch") or flame.batch # load clip to timeline and get main variables - version = context['version'] - version_data = version.get("data", {}) - version_name = version.get("name", None) + version_entity = context["version"] + version_attributes =version_entity["attrib"] + version_name = version_entity["version"] colorspace = self.get_colorspace(context) clip_name_template = self.clip_name_template @@ -59,20 +59,20 @@ class LoadClipBatch(opfapi.ClipLoader): layer_rename_template = layer_rename_template.replace( "output", "representation") - asset_doc = context["asset"] - subset_doc = context["subset"] + folder_entity = context["folder"] + product_entity = context["product"] formatting_data = deepcopy(context["representation"]["context"]) formatting_data["batch"] = self.batch.name.get_value() formatting_data.update({ - "asset": asset_doc["name"], + "asset": folder_entity["name"], "folder": { - "name": asset_doc["name"], + "name": folder_entity["name"], }, - "subset": subset_doc["name"], - "family": subset_doc["data"]["family"], + "subset": product_entity["name"], + "family": product_entity["productType"], "product": { - "name": subset_doc["name"], - "type": subset_doc["data"]["family"], + "name": product_entity["name"], + "type": product_entity["productType"], } }) @@ -129,7 +129,7 @@ class LoadClipBatch(opfapi.ClipLoader): # move all version data keys to tag data data_imprint = { - key: version_data.get(key, str(None)) + key: version_attributes.get(key, str(None)) for key in add_keys } # add variables related to version context diff --git 
a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py index 9d6560023c..ca5475824d 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -37,7 +37,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): self.otio_timeline = context.data["otioTimeline"] self.fps = context.data["fps"] - # process all sellected + # process all selected for segment in selected_segments: # get openpype tag data marker_data = opfapi.get_segment_data_marker(segment) @@ -100,6 +100,12 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin): marker_data["handleEnd"] = min( marker_data["handleEnd"], tail) + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in marker_data: + for parent in marker_data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") + workfile_start = self._set_workfile_start(marker_data) with_audio = bool(marker_data.pop("audio")) diff --git a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py index 6d04d53cea..7609ea7879 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py +++ b/client/ayon_core/hosts/flame/plugins/publish/collect_timeline_otio.py @@ -1,6 +1,5 @@ import pyblish.api -from ayon_core.client import get_asset_name_identifier import ayon_core.hosts.flame.api as opfapi from ayon_core.hosts.flame.otio import flame_export from ayon_core.pipeline.create import get_product_name @@ -18,31 +17,33 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin): variant = "otioTimeline" # main - asset_doc = context.data["assetEntity"] - task_name = context.data["task"] + folder_entity = context.data["folderEntity"] project = opfapi.get_current_project() sequence = opfapi.get_current_sequence(opfapi.CTX.selection) # create product name + task_entity = context.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] product_name = get_product_name( context.data["projectName"], - asset_doc, task_name, + task_type, context.data["hostName"], product_type, variant, project_settings=context.data["project_settings"] ) - folder_path = get_asset_name_identifier(asset_doc) - # adding otio timeline to context with opfapi.maintained_segment_selection(sequence) as selected_seg: otio_timeline = flame_export.create_otio_timeline(sequence) instance_data = { "name": product_name, - "folderPath": folder_path, + "folderPath": folder_entity["path"], "productName": product_name, "productType": product_type, "family": product_type, diff --git a/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py b/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py index a66ee9f2c0..d8669f836d 100644 --- a/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py +++ b/client/ayon_core/hosts/flame/plugins/publish/integrate_batch_group.py @@ -219,7 +219,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): # update task data in anatomy data project_task_types = anatomy_obj["tasks"] - task_code = project_task_types.get(task_type, {}).get("short_name") + task_code = project_task_types.get(task_type, {}).get("shortName") anatomy_data.update({ "task": { "name": task_name, @@ -247,7 +247,7 @@ class 
IntegrateBatchGroup(pyblish.api.InstancePlugin): os.makedirs(render_dir_path, mode=0o777) # TODO: add most of these to `imageio/flame/batch/write_node` - name = "{project[code]}_{asset}_{task[name]}".format( + name = "{project[code]}_{folder[name]}_{task[name]}".format( **anatomy_data ) @@ -321,16 +321,17 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin): )) def _get_shot_task_dir_path(self, instance, task_data): - project_doc = instance.data["projectEntity"] - asset_entity = instance.data["assetEntity"] + project_entity = instance.data["projectEntity"] + folder_entity = instance.data["folderEntity"] + task_entity = instance.data["taskEntity"] anatomy = instance.context.data["anatomy"] project_settings = instance.context.data["project_settings"] return get_workdir( - project_doc, - asset_entity, - task_data["name"], + project_entity, + folder_entity, + task_entity, "flame", - anatomy, + anatomy=anatomy, project_settings=project_settings ) diff --git a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py index 0e84a5ef52..a66980493e 100644 --- a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py +++ b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/ftrack_lib.py @@ -396,7 +396,7 @@ class FtrackEntityOperator: entity = session.query(query).first() - # if entity doesnt exist then create one + # if entity doesn't exist then create one if not entity: entity = self.create_ftrack_entity( session, diff --git a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py index 5c5bb0b4a1..ce023a9e4d 100644 --- a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py +++ b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/modules/panel_app.py @@ -79,7 +79,7 @@ class FlameBabyPublisherPanel(object): # creating ui self.window.setMinimumSize(1500, 600) - self.window.setWindowTitle('OpenPype: Baby-publisher') + self.window.setWindowTitle('AYON: Baby-publisher') self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint) self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose) self.window.setFocusPolicy(QtCore.Qt.StrongFocus) diff --git a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/openpype_babypublisher.py b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/openpype_babypublisher.py index 4675d163e3..76d74b5970 100644 --- a/client/ayon_core/hosts/flame/startup/openpype_babypublisher/openpype_babypublisher.py +++ b/client/ayon_core/hosts/flame/startup/openpype_babypublisher/openpype_babypublisher.py @@ -31,7 +31,7 @@ def scope_sequence(selection): def get_media_panel_custom_ui_actions(): return [ { - "name": "OpenPype: Baby-publisher", + "name": "AYON: Baby-publisher", "actions": [ { "name": "Create Shots", diff --git a/client/ayon_core/hosts/flame/startup/openpype_in_flame.py b/client/ayon_core/hosts/flame/startup/openpype_in_flame.py index cf0a24ede2..b9cbf9700b 100644 --- a/client/ayon_core/hosts/flame/startup/openpype_in_flame.py +++ b/client/ayon_core/hosts/flame/startup/openpype_in_flame.py @@ -12,7 +12,7 @@ from ayon_core.pipeline import ( def openpype_install(): - """Registering OpenPype in context + """Registering AYON in context """ install_host(opfapi) print("Registered host: {}".format(registered_host())) @@ -28,7 +28,7 @@ def exeption_handler(exctype, value, 
_traceback): tb (str): traceback to show """ import traceback - msg = "OpenPype: Python exception {} in {}".format(value, exctype) + msg = "AYON: Python exception {} in {}".format(value, exctype) mbox = QtWidgets.QMessageBox() mbox.setText(msg) mbox.setDetailedText( diff --git a/client/ayon_core/hosts/fusion/api/__init__.py b/client/ayon_core/hosts/fusion/api/__init__.py index aabc624016..d2feee6d23 100644 --- a/client/ayon_core/hosts/fusion/api/__init__.py +++ b/client/ayon_core/hosts/fusion/api/__init__.py @@ -9,13 +9,13 @@ from .pipeline import ( from .lib import ( maintained_selection, update_frame_range, - set_asset_framerange, + set_current_context_framerange, get_current_comp, get_bmd_library, comp_lock_and_undo_chunk ) -from .menu import launch_openpype_menu +from .menu import launch_ayon_menu __all__ = [ @@ -29,11 +29,11 @@ __all__ = [ # lib "maintained_selection", "update_frame_range", - "set_asset_framerange", + "set_current_context_framerange", "get_current_comp", "get_bmd_library", "comp_lock_and_undo_chunk", # menu - "launch_openpype_menu", + "launch_ayon_menu", ] diff --git a/client/ayon_core/hosts/fusion/api/lib.py b/client/ayon_core/hosts/fusion/api/lib.py index b31f812c1b..08722463e1 100644 --- a/client/ayon_core/hosts/fusion/api/lib.py +++ b/client/ayon_core/hosts/fusion/api/lib.py @@ -3,20 +3,11 @@ import sys import re import contextlib -from ayon_core.lib import Logger -from ayon_core.client import ( - get_asset_by_name, - get_subset_by_name, - get_last_version_by_subset_id, - get_representation_by_id, - get_representation_by_name, - get_representation_parents, -) -from ayon_core.pipeline import ( - switch_container, - get_current_project_name, -) -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.lib import Logger, BoolDef, UILabelDef +from ayon_core.style import load_stylesheet +from ayon_core.pipeline import registered_host +from ayon_core.pipeline.create import CreateContext +from ayon_core.pipeline.context_tools import get_current_folder_entity self = sys.modules[__name__] self._project = None @@ -63,23 +54,46 @@ def update_frame_range(start, end, comp=None, set_render_range=True, comp.SetAttrs(attrs) -def set_asset_framerange(): - """Set Comp's frame range based on current asset""" - asset_doc = get_current_project_asset() - start = asset_doc["data"]["frameStart"] - end = asset_doc["data"]["frameEnd"] - handle_start = asset_doc["data"]["handleStart"] - handle_end = asset_doc["data"]["handleEnd"] +def set_current_context_framerange(folder_entity=None): + """Set Comp's frame range based on current folder.""" + if folder_entity is None: + folder_entity = get_current_folder_entity( + fields={"attrib.frameStart", + "attrib.frameEnd", + "attrib.handleStart", + "attrib.handleEnd"}) + + folder_attributes = folder_entity["attrib"] + start = folder_attributes["frameStart"] + end = folder_attributes["frameEnd"] + handle_start = folder_attributes["handleStart"] + handle_end = folder_attributes["handleEnd"] update_frame_range(start, end, set_render_range=True, handle_start=handle_start, handle_end=handle_end) -def set_asset_resolution(): - """Set Comp's resolution width x height default based on current asset""" - asset_doc = get_current_project_asset() - width = asset_doc["data"]["resolutionWidth"] - height = asset_doc["data"]["resolutionHeight"] +def set_current_context_fps(folder_entity=None): + """Set Comp's frame rate (FPS) to based on current asset""" + if folder_entity is None: + folder_entity = 
get_current_folder_entity(fields={"attrib.fps"})
+
+    fps = float(folder_entity["attrib"].get("fps", 24.0))
+    comp = get_current_comp()
+    comp.SetPrefs({
+        "Comp.FrameFormat.Rate": fps,
+    })
+
+
+def set_current_context_resolution(folder_entity=None):
+    """Set Comp's resolution width x height default based on current folder"""
+    if folder_entity is None:
+        folder_entity = get_current_folder_entity(
+            fields={"attrib.resolutionWidth", "attrib.resolutionHeight"})
+
+    folder_attributes = folder_entity["attrib"]
+    width = folder_attributes["resolutionWidth"]
+    height = folder_attributes["resolutionHeight"]
     comp = get_current_comp()
     print("Setting comp frame format resolution to {}x{}".format(width,
@@ -91,7 +105,7 @@ def set_asset_resolution():
 
 
 def validate_comp_prefs(comp=None, force_repair=False):
-    """Validate current comp defaults with asset settings.
+    """Validate current comp defaults with folder settings.
 
     Validates fps, resolutionWidth, resolutionHeight, aspectRatio.
 
@@ -103,22 +117,23 @@ def validate_comp_prefs(comp=None, force_repair=False):
 
     log = Logger.get_logger("validate_comp_prefs")
 
-    fields = [
-        "name",
-        "data.fps",
-        "data.resolutionWidth",
-        "data.resolutionHeight",
-        "data.pixelAspect"
-    ]
-    asset_doc = get_current_project_asset(fields=fields)
-    asset_data = asset_doc["data"]
+    fields = {
+        "path",
+        "attrib.fps",
+        "attrib.resolutionWidth",
+        "attrib.resolutionHeight",
+        "attrib.pixelAspect",
+    }
+    folder_entity = get_current_folder_entity(fields=fields)
+    folder_path = folder_entity["path"]
+    folder_attributes = folder_entity["attrib"]
 
     comp_frame_format_prefs = comp.GetPrefs("Comp.FrameFormat")
 
     # Pixel aspect ratio in Fusion is set as AspectX and AspectY so we convert
     # the data to something that is more sensible to Fusion
-    asset_data["pixelAspectX"] = asset_data.pop("pixelAspect")
-    asset_data["pixelAspectY"] = 1.0
+    folder_attributes["pixelAspectX"] = folder_attributes.pop("pixelAspect")
+    folder_attributes["pixelAspectY"] = 1.0
 
     validations = [
         ("fps", "Rate", "FPS"),
@@ -130,23 +145,23 @@ def validate_comp_prefs(comp=None, force_repair=False):
 
     invalid = []
     for key, comp_key, label in validations:
-        asset_value = asset_data[key]
+        folder_value = folder_attributes[key]
         comp_value = comp_frame_format_prefs.get(comp_key)
-        if asset_value != comp_value:
+        if folder_value != comp_value:
             invalid_msg = "{} {} should be {}".format(label,
                                                       comp_value,
-                                                      asset_value)
+                                                      folder_value)
             invalid.append(invalid_msg)
 
             if not force_repair:
                 # Do not log warning if we force repair anyway
                 log.warning(
-                    "Comp {pref} {value} does not match asset "
-                    "'{asset_name}' {pref} {asset_value}".format(
+                    "Comp {pref} {value} does not match folder "
+                    "'{folder_path}' {pref} {folder_value}".format(
                         pref=label,
                         value=comp_value,
-                        asset_name=asset_doc["name"],
-                        asset_value=asset_value)
+                        folder_path=folder_path,
+                        folder_value=folder_value)
                 )
 
     if invalid:
@@ -154,7 +169,7 @@ def validate_comp_prefs(comp=None, force_repair=False):
         def _on_repair():
             attributes = dict()
             for key, comp_key, _label in validations:
-                value = asset_data[key]
+                value = folder_attributes[key]
                 comp_key_full = "Comp.FrameFormat.{}".format(comp_key)
                 attributes[comp_key_full] = value
             comp.SetPrefs(attributes)
 
@@ -166,11 +181,10 @@ def validate_comp_prefs(comp=None, force_repair=False):
 
         from .
import menu from ayon_core.tools.utils import SimplePopup - from ayon_core.style import load_stylesheet dialog = SimplePopup(parent=menu.menu) dialog.setWindowTitle("Fusion comp has invalid configuration") - msg = "Comp preferences mismatches '{}'".format(asset_doc["name"]) + msg = "Comp preferences mismatches '{}'".format(folder_path) msg += "\n" + "\n".join(invalid) dialog.set_message(msg) dialog.set_button_text("Repair") @@ -293,3 +307,96 @@ def comp_lock_and_undo_chunk( finally: comp.Unlock() comp.EndUndo(keep_undo) + + +def update_content_on_context_change(): + """Update all Creator instances to current asset""" + host = registered_host() + context = host.get_current_context() + + folder_path = context["folder_path"] + task = context["task_name"] + + create_context = CreateContext(host, reset=True) + + for instance in create_context.instances: + instance_folder_path = instance.get("folderPath") + if instance_folder_path and instance_folder_path != folder_path: + instance["folderPath"] = folder_path + instance_task = instance.get("task") + if instance_task and instance_task != task: + instance["task"] = task + + create_context.save_changes() + + +def prompt_reset_context(): + """Prompt the user what context settings to reset. + This prompt is used on saving to a different task to allow the scene to + get matched to the new context. + """ + # TODO: Cleanup this prototyped mess of imports and odd dialog + from ayon_core.tools.attribute_defs.dialog import ( + AttributeDefinitionsDialog + ) + from qtpy import QtCore + + definitions = [ + UILabelDef( + label=( + "You are saving your workfile into a different folder or task." + "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset Comp FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset Comp start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Comp Resolution", + tooltip="Reset Comp resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowFlags( + dialog.windowFlags() | QtCore.Qt.WindowStaysOnTopHint + ) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + folder_entity = get_current_folder_entity() + if options["frame_range"]: + set_current_context_framerange(folder_entity) + + if options["fps"]: + set_current_context_fps(folder_entity) + + if options["resolution"]: + set_current_context_resolution(folder_entity) + + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() diff --git a/client/ayon_core/hosts/fusion/api/menu.py b/client/ayon_core/hosts/fusion/api/menu.py index a2b0a7b628..6a64ad2120 100644 --- a/client/ayon_core/hosts/fusion/api/menu.py +++ b/client/ayon_core/hosts/fusion/api/menu.py @@ -10,10 +10,10 @@ from ayon_core.hosts.fusion.scripts import ( duplicate_with_inputs, ) from ayon_core.hosts.fusion.api.lib import ( - set_asset_framerange, - set_asset_resolution, + set_current_context_framerange, + set_current_context_resolution, ) -from ayon_core.pipeline import get_current_asset_name +from ayon_core.pipeline import get_current_folder_path from ayon_core.resources import get_ayon_icon_filepath from 
ayon_core.tools.utils import get_qt_app @@ -28,9 +28,9 @@ self = sys.modules[__name__] self.menu = None -class OpenPypeMenu(QtWidgets.QWidget): +class AYONMenu(QtWidgets.QWidget): def __init__(self, *args, **kwargs): - super(OpenPypeMenu, self).__init__(*args, **kwargs) + super(AYONMenu, self).__init__(*args, **kwargs) self.setObjectName(f"{MENU_LABEL}Menu") @@ -49,15 +49,15 @@ class OpenPypeMenu(QtWidgets.QWidget): self.render_mode_widget = None self.setWindowTitle(MENU_LABEL) - asset_label = QtWidgets.QLabel("Context", self) - asset_label.setStyleSheet( + context_label = QtWidgets.QLabel("Context", self) + context_label.setStyleSheet( """QLabel { font-size: 14px; font-weight: 600; color: #5f9fb8; }""" ) - asset_label.setAlignment(QtCore.Qt.AlignHCenter) + context_label.setAlignment(QtCore.Qt.AlignHCenter) workfiles_btn = QtWidgets.QPushButton("Workfiles...", self) create_btn = QtWidgets.QPushButton("Create...", self) @@ -74,7 +74,7 @@ class OpenPypeMenu(QtWidgets.QWidget): layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(10, 20, 10, 20) - layout.addWidget(asset_label) + layout.addWidget(context_label) layout.addSpacing(20) @@ -103,7 +103,7 @@ class OpenPypeMenu(QtWidgets.QWidget): self.setLayout(layout) # Store reference so we can update the label - self.asset_label = asset_label + self.context_label = context_label workfiles_btn.clicked.connect(self.on_workfile_clicked) create_btn.clicked.connect(self.on_create_clicked) @@ -125,14 +125,14 @@ class OpenPypeMenu(QtWidgets.QWidget): self._pulse = FusionPulse(parent=self) self._pulse.start() - # Detect Fusion events as OpenPype events + # Detect Fusion events as AYON events self._event_handler = FusionEventHandler(parent=self) self._event_handler.start() def on_task_changed(self): # Update current context label - label = get_current_asset_name() - self.asset_label.setText(label) + label = get_current_folder_path() + self.context_label.setText(label) def register_callback(self, name, fn): # Create a wrapper callback that we only store @@ -168,22 +168,22 @@ class OpenPypeMenu(QtWidgets.QWidget): duplicate_with_inputs.duplicate_with_input_connections() def on_set_resolution_clicked(self): - set_asset_resolution() + set_current_context_resolution() def on_set_framerange_clicked(self): - set_asset_framerange() + set_current_context_framerange() -def launch_openpype_menu(): +def launch_ayon_menu(): app = get_qt_app() - pype_menu = OpenPypeMenu() + ayon_menu = AYONMenu() stylesheet = load_stylesheet() - pype_menu.setStyleSheet(stylesheet) + ayon_menu.setStyleSheet(stylesheet) - pype_menu.show() - self.menu = pype_menu + ayon_menu.show() + self.menu = ayon_menu result = app.exec_() print("Shutting down..") diff --git a/client/ayon_core/hosts/fusion/api/pipeline.py b/client/ayon_core/hosts/fusion/api/pipeline.py index 0e9e0724c7..2d1073ec7d 100644 --- a/client/ayon_core/hosts/fusion/api/pipeline.py +++ b/client/ayon_core/hosts/fusion/api/pipeline.py @@ -5,6 +5,7 @@ import os import sys import logging import contextlib +from pathlib import Path import pyblish.api from qtpy import QtCore @@ -28,8 +29,8 @@ from ayon_core.tools.utils import host_tools from .lib import ( get_current_comp, - comp_lock_and_undo_chunk, - validate_comp_prefs + validate_comp_prefs, + prompt_reset_context ) log = Logger.get_logger(__name__) @@ -41,6 +42,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +# Track whether the workfile tool is about to save 
+_about_to_save = False
+
 
 class FusionLogHandler(logging.Handler):
     # Keep a reference to fusion's Print function (Remote Object)
@@ -70,7 +74,7 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
     name = "fusion"
 
     def install(self):
-        """Install fusion-specific functionality of OpenPype.
+        """Install fusion-specific functionality of AYON.
 
         This is where you install menus and register families, data
        and loaders into fusion.
@@ -104,8 +108,10 @@ class FusionHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
 
         # Register events
         register_event_callback("open", on_after_open)
+        register_event_callback("workfile.save.before", before_workfile_save)
         register_event_callback("save", on_save)
         register_event_callback("new", on_new)
+        register_event_callback("taskChanged", on_task_changed)
 
     # region workfile io api
     def has_unsaved_changes(self):
@@ -169,6 +175,19 @@ def on_save(event):
     comp = event["sender"]
     validate_comp_prefs(comp)
 
+    # We are now starting the actual save directly
+    global _about_to_save
+    _about_to_save = False
+
+
+def on_task_changed():
+    global _about_to_save
+    print(f"Task changed: {_about_to_save}")
+    # TODO: Only do this if not headless
+    if _about_to_save:
+        # Let's prompt the user to update the context settings or not
+        prompt_reset_context()
+
 
 def on_after_open(event):
     comp = event["sender"]
@@ -177,7 +196,7 @@ def on_after_open(event):
     if any_outdated_containers():
         log.warning("Scene has outdated content.")
 
-        # Find OpenPype menu to attach to
+        # Find AYON menu to attach to
         from . import menu
 
         def _on_show_scene_inventory():
@@ -202,6 +221,28 @@ def on_after_open(event):
             dialog.setStyleSheet(load_stylesheet())
 
 
+def before_workfile_save(event):
+    # Due to Fusion's external python process design we can't really
+    # detect whether the current Fusion environment matches the one the
+    # artist expects it to be. For example, our pipeline python process
+    # might have been shut down and restarted - which will restart it to
+    # the environment Fusion started with; not necessarily where the
+    # artist is currently working.
+    # The `_about_to_save` var is used to detect context changes when
+    # saving into another context. If we keep it False it will be ignored
+    # as a context change. As such, before we change tasks we will only
+    # consider it a context change if the current filepath is within the
+    # currently known AYON_WORKDIR. This way we avoid false positives of
+    # thinking it's saving to another context, and instead sometimes just
+    # have false negatives where we fail to show the "Update on task
+    # change" prompt.
+    comp = get_current_comp()
+    filepath = comp.GetAttrs()["COMPS_FileName"]
+    workdir = os.environ.get("AYON_WORKDIR")
+    if Path(workdir) in Path(filepath).parents:
+        global _about_to_save
+        _about_to_save = True
+
+
 def ls():
     """List containers from active Fusion scene
 
@@ -252,7 +293,7 @@ def imprint_container(tool,
         ("name", str(name)),
         ("namespace", str(namespace)),
         ("loader", str(loader)),
-        ("representation", str(context["representation"]["_id"])),
+        ("representation", context["representation"]["id"]),
     ]
 
     for key, value in data:
@@ -326,9 +367,9 @@ class FusionEventThread(QtCore.QThread):
 
 
 class FusionEventHandler(QtCore.QObject):
-    """Emits OpenPype events based on Fusion events captured in a QThread.
+    """Emits AYON events based on Fusion events captured in a QThread.
- This will emit the following OpenPype events based on Fusion actions: + This will emit the following AYON events based on Fusion actions: save: Comp_Save, Comp_SaveAs open: Comp_Opened new: Comp_New @@ -338,7 +379,6 @@ class FusionEventHandler(QtCore.QObject): >>> handler = FusionEventHandler(parent=window) >>> handler.start() - """ ACTION_IDS = [ "Comp_Save", @@ -374,7 +414,7 @@ class FusionEventHandler(QtCore.QObject): self._event_thread.stop() def _on_event(self, event): - """Handle Fusion events to emit OpenPype events""" + """Handle Fusion events to emit AYON events""" if not event: return diff --git a/client/ayon_core/hosts/fusion/api/plugin.py b/client/ayon_core/hosts/fusion/api/plugin.py index 95db8126e7..efe8269120 100644 --- a/client/ayon_core/hosts/fusion/api/plugin.py +++ b/client/ayon_core/hosts/fusion/api/plugin.py @@ -16,6 +16,12 @@ from ayon_core.pipeline import ( AVALON_INSTANCE_ID, AYON_INSTANCE_ID, ) +from ayon_core.pipeline.workfile import get_workdir +from ayon_api import ( + get_project, + get_folder_by_path, + get_task_by_name +) class GenericCreateSaver(Creator): @@ -125,6 +131,8 @@ class GenericCreateSaver(Creator): product_name = data["productName"] if ( original_product_name != product_name + or tool.GetData("openpype.task") != data["task"] + or tool.GetData("openpype.folderPath") != data["folderPath"] or original_format != data["creator_attributes"]["image_format"] ): self._configure_saver_tool(data, tool, product_name) @@ -133,19 +141,42 @@ class GenericCreateSaver(Creator): formatting_data = deepcopy(data) # get frame padding from anatomy templates - frame_padding = self.project_anatomy.templates["frame_padding"] + frame_padding = self.project_anatomy.templates_obj.frame_padding # get output format ext = data["creator_attributes"]["image_format"] - # Subset change detected + # Product change detected product_type = formatting_data["productType"] f_product_name = formatting_data["productName"] folder_path = formatting_data["folderPath"] folder_name = folder_path.rsplit("/", 1)[-1] - workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + # If the folder path and task do not match the current context then the + # workdir is not just the `AYON_WORKDIR`. Hence, we need to actually + # compute the resulting workdir + if ( + data["folderPath"] == self.create_context.get_current_folder_path() + and data["task"] == self.create_context.get_current_task_name() + ): + workdir = os.path.normpath(os.getenv("AYON_WORKDIR")) + else: + # TODO: Optimize this logic + project_name = self.create_context.get_current_project_name() + project_entity = get_project(project_name) + folder_entity = get_folder_by_path(project_name, + data["folderPath"]) + task_entity = get_task_by_name(project_name, + folder_id=folder_entity["id"], + task_name=data["task"]) + workdir = get_workdir( + project_entity=project_entity, + folder_entity=folder_entity, + task_entity=task_entity, + host_name=self.create_context.host_name, + ) + formatting_data.update({ "workdir": workdir, "frame": "0" * frame_padding, diff --git a/client/ayon_core/hosts/fusion/deploy/MenuScripts/README.md b/client/ayon_core/hosts/fusion/deploy/MenuScripts/README.md index f87eaea4a2..e291b8d8f2 100644 --- a/client/ayon_core/hosts/fusion/deploy/MenuScripts/README.md +++ b/client/ayon_core/hosts/fusion/deploy/MenuScripts/README.md @@ -1,6 +1,6 @@ -### OpenPype deploy MenuScripts +### AYON deploy MenuScripts Note that this `MenuScripts` is not an official Fusion folder. 
-OpenPype only uses this folder in `{fusion}/deploy/` to trigger the OpenPype menu actions. +AYON only uses this folder in `{fusion}/deploy/` to trigger the AYON menu actions. They are used in the actions defined in `.fu` files in `{fusion}/deploy/Config`. \ No newline at end of file diff --git a/client/ayon_core/hosts/fusion/deploy/MenuScripts/launch_menu.py b/client/ayon_core/hosts/fusion/deploy/MenuScripts/launch_menu.py index 23b02b1b69..640f78eeb8 100644 --- a/client/ayon_core/hosts/fusion/deploy/MenuScripts/launch_menu.py +++ b/client/ayon_core/hosts/fusion/deploy/MenuScripts/launch_menu.py @@ -35,7 +35,7 @@ def main(env): log = Logger.get_logger(__name__) log.info(f"Registered host: {registered_host()}") - menu.launch_openpype_menu() + menu.launch_ayon_menu() # Initiate a QTimer to check if Fusion is still alive every X interval # If Fusion is not found - kill itself diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py new file mode 100644 index 0000000000..113a1ffe59 --- /dev/null +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_launch_menu_hook.py @@ -0,0 +1,36 @@ +import os +from ayon_applications import PreLaunchHook +from ayon_core.hosts.fusion import FUSION_HOST_DIR + + +class FusionLaunchMenuHook(PreLaunchHook): + """Launch AYON menu on start of Fusion""" + app_groups = ["fusion"] + order = 9 + + def execute(self): + # Prelaunch hook is optional + settings = self.data["project_settings"][self.host_name] + if not settings["hooks"]["FusionLaunchMenuHook"]["enabled"]: + return + + variant = self.application.name + if variant.isnumeric(): + version = int(variant) + if version < 18: + print("Skipping launch of OpenPype menu on Fusion start " + "because Fusion version below 18.0 does not support " + "/execute argument on launch. " + f"Version detected: {version}") + return + else: + print(f"Application variant is not numeric: {variant}. " + "Validation for Fusion version 18+ for /execute " + "prelaunch argument skipped.") + + path = os.path.join(FUSION_HOST_DIR, + "deploy", + "MenuScripts", + "launch_menu.py").replace("\\", "/") + script = f"fusion:RunScript('{path}')" + self.launch_context.launch_args.extend(["/execute", script]) diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py index 5aa2783129..1064d0a83a 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_profile_hook.py @@ -7,7 +7,7 @@ from ayon_core.hosts.fusion import ( FUSION_VERSIONS_DICT, get_fusion_version, ) -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, @@ -19,7 +19,7 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook): Prepares local Fusion profile directory, copies existing Fusion profile. 
This also sets FUSION MasterPrefs variable, which is used to apply Master.prefs file to override some Fusion profile settings to: - - enable the OpenPype menu + - enable the AYON menu - force Python 3 over Python 2 - force English interface Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs diff --git a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py index 7eaf2ddc02..ef084b0483 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_fusion_setup.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, LaunchTypes, ApplicationLaunchFailed, @@ -13,7 +13,7 @@ from ayon_core.hosts.fusion import ( class FusionPrelaunch(PreLaunchHook): """ - Prepares OpenPype Fusion environment. + Prepares AYON Fusion environment. Requires correct Python home variable to be defined in the environment settings for Fusion to point at a valid Python 3 build for Fusion. Python3 versions that are supported by Fusion: diff --git a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py index a9db39e24e..4678d5bac7 100644 --- a/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py +++ b/client/ayon_core/hosts/fusion/hooks/pre_pyside_install.py @@ -3,7 +3,7 @@ import subprocess import platform import uuid -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InstallPySideToFusion(PreLaunchHook): @@ -85,7 +85,6 @@ class InstallPySideToFusion(PreLaunchHook): administration rights. """ try: - import win32api import win32con import win32process import win32event diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py b/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py index 8110898ae9..729843d078 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_image_saver.py @@ -1,7 +1,6 @@ from ayon_core.lib import NumberDef from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver -from ayon_core.hosts.fusion.api import get_current_comp class CreateImageSaver(GenericCreateSaver): diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py index b5abb2d949..20c7b99851 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_saver.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_saver.py @@ -1,6 +1,11 @@ -from ayon_core.lib import EnumDef +from ayon_core.lib import ( + UILabelDef, + NumberDef, + EnumDef +) from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver +from ayon_core.hosts.fusion.api.lib import get_current_comp class CreateSaver(GenericCreateSaver): @@ -15,7 +20,7 @@ class CreateSaver(GenericCreateSaver): product_type = "render" description = "Fusion Saver to generate image sequence" - default_frame_range_option = "asset_db" + default_frame_range_option = "current_folder" def get_detail_description(self): return """Fusion Saver to generate image sequence. @@ -24,7 +29,7 @@ class CreateSaver(GenericCreateSaver): product type. (But can publish even single frame 'render'.) 
Select what should be source of render range: - - "Current asset context" - values set on Asset in DB (Ftrack) + - "Current Folder context" - values set on folder on AYON server - "From render in/out" - from node itself - "From composition timeline" - from timeline @@ -45,14 +50,16 @@ class CreateSaver(GenericCreateSaver): self._get_reviewable_bool(), self._get_frame_range_enum(), self._get_image_format_enum(), + *self._get_custom_frame_range_attribute_defs() ] return attr_defs def _get_frame_range_enum(self): frame_range_options = { - "asset_db": "Current asset context", + "current_folder": "Current Folder context", "render_range": "From render in/out", "comp_range": "From composition timeline", + "custom_range": "Custom frame range", } return EnumDef( @@ -61,3 +68,82 @@ class CreateSaver(GenericCreateSaver): label="Frame range source", default=self.default_frame_range_option ) + + @staticmethod + def _get_custom_frame_range_attribute_defs() -> list: + + # Define custom frame range defaults based on current comp + # timeline settings (if a comp is currently open) + comp = get_current_comp() + if comp is not None: + attrs = comp.GetAttrs() + frame_defaults = { + "frameStart": int(attrs["COMPN_GlobalStart"]), + "frameEnd": int(attrs["COMPN_GlobalEnd"]), + "handleStart": int( + attrs["COMPN_RenderStart"] - attrs["COMPN_GlobalStart"] + ), + "handleEnd": int( + attrs["COMPN_GlobalEnd"] - attrs["COMPN_RenderEnd"] + ), + } + else: + frame_defaults = { + "frameStart": 1001, + "frameEnd": 1100, + "handleStart": 0, + "handleEnd": 0 + } + + return [ + UILabelDef( + label="
Custom Frame Range
" + "only used with 'Custom frame range' source" + ), + NumberDef( + "custom_frameStart", + label="Frame Start", + default=frame_defaults["frameStart"], + minimum=0, + decimals=0, + tooltip=( + "Set the start frame for the export.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_frameEnd", + label="Frame End", + default=frame_defaults["frameEnd"], + minimum=0, + decimals=0, + tooltip=( + "Set the end frame for the export.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_handleStart", + label="Handle Start", + default=frame_defaults["handleStart"], + minimum=0, + decimals=0, + tooltip=( + "Set the start handles for the export, this will be " + "added before the start frame.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ), + NumberDef( + "custom_handleEnd", + label="Handle End", + default=frame_defaults["handleEnd"], + minimum=0, + decimals=0, + tooltip=( + "Set the end handles for the export, this will be added " + "after the end frame.\n" + "Only used if frame range source is 'Custom frame range'." + ) + ) + ] diff --git a/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py b/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py index dfd9da3df1..a2fe027ef4 100644 --- a/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/fusion/plugins/create/create_workfile.py @@ -1,7 +1,8 @@ +import ayon_api + from ayon_core.hosts.fusion.api import ( get_current_comp ) -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( AutoCreator, CreatedInstance, @@ -54,7 +55,6 @@ class FusionWorkfileCreator(AutoCreator): comp.SetData(self.data_key, data) def create(self, options=None): - comp = get_current_comp() if not comp: self.log.error("Unable to find current comp") @@ -67,33 +67,37 @@ class FusionWorkfileCreator(AutoCreator): break project_name = self.create_context.get_current_project_name() - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name - if existing_instance is None: - existing_instance_asset = None - else: - existing_instance_asset = existing_instance["folderPath"] + existing_folder_path = None + if existing_instance is not None: + existing_folder_path = existing_instance["folderPath"] if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant, } data.update(self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, None @@ -107,17 +111,22 @@ class FusionWorkfileCreator(AutoCreator): self._add_instance_to_context(new_instance) elif ( - existing_instance_asset != asset_name + existing_folder_path != folder_path or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = 
ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/fusion/plugins/load/actions.py b/client/ayon_core/hosts/fusion/plugins/load/actions.py index f67878bcff..95400ea41c 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/actions.py +++ b/client/ayon_core/hosts/fusion/plugins/load/actions.py @@ -8,14 +8,16 @@ from ayon_core.pipeline import load class FusionSetFrameRangeLoader(load.LoaderPlugin): """Set frame range excluding pre- and post-handles""" - families = ["animation", - "camera", - "imagesequence", - "render", - "yeticache", - "pointcache", - "render"] - representations = ["*"] + product_types = { + "animation", + "camera", + "imagesequence", + "render", + "yeticache", + "pointcache", + "render", + } + representations = {"*"} extensions = {"*"} label = "Set frame range" @@ -27,11 +29,10 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin): from ayon_core.hosts.fusion.api import lib - version = context['version'] - version_data = version.get("data", {}) + version_attributes = context["version"]["attrib"] - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + start = version_attributes.get("frameStart", None) + end = version_attributes.get("frameEnd", None) if start is None or end is None: print("Skipping setting frame range because start or " @@ -44,14 +45,16 @@ class FusionSetFrameRangeLoader(load.LoaderPlugin): class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin): """Set frame range including pre- and post-handles""" - families = ["animation", - "camera", - "imagesequence", - "render", - "yeticache", - "pointcache", - "render"] - representations = ["*"] + product_types = { + "animation", + "camera", + "imagesequence", + "render", + "yeticache", + "pointcache", + "render", + } + representations = {"*"} label = "Set frame range (with handles)" order = 12 @@ -62,11 +65,9 @@ class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin): from ayon_core.hosts.fusion.api import lib - version = context['version'] - version_data = version.get("data", {}) - - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + version_attributes = context["version"]["attrib"] + start = version_attributes.get("frameStart", None) + end = version_attributes.get("frameEnd", None) if start is None or end is None: print("Skipping setting frame range because start or " @@ -74,7 +75,7 @@ class FusionSetFrameRangeWithHandlesLoader(load.LoaderPlugin): return # Include handles - start -= version_data.get("handleStart", 0) - end += version_data.get("handleEnd", 0) + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) lib.update_frame_range(start, end) diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py index 0bc7ffd180..312362caca 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_alembic.py @@ -12,8 +12,8 @@ from ayon_core.hosts.fusion.api import ( class FusionLoadAlembicMesh(load.LoaderPlugin): """Load Alembic mesh into Fusion""" - families = 
["pointcache", "model"] - representations = ["*"] + product_types = {"pointcache", "model"} + representations = {"*"} extensions = {"abc"} label = "Load alembic mesh" @@ -24,9 +24,9 @@ class FusionLoadAlembicMesh(load.LoaderPlugin): tool_type = "SurfaceAlembicMesh" def load(self, context, name, namespace, data): - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context['asset']['name'] + namespace = context["folder"]["name"] # Create the Loader with the filename path set comp = get_current_comp() @@ -44,23 +44,24 @@ class FusionLoadAlembicMesh(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update Alembic path""" tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["Filename"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", repre_entity["id"]) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py index 3751d7cc39..a84e7e0914 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_fbx.py @@ -12,8 +12,8 @@ from ayon_core.hosts.fusion.api import ( class FusionLoadFBXMesh(load.LoaderPlugin): """Load FBX mesh into Fusion""" - families = ["*"] - representations = ["*"] + product_types = {"*"} + representations = {"*"} extensions = { "3ds", "amc", @@ -38,9 +38,9 @@ class FusionLoadFBXMesh(load.LoaderPlugin): tool_type = "SurfaceFBXMesh" def load(self, context, name, namespace, data): - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context["asset"]["name"] + namespace = context["folder"]["name"] # Create the Loader with the filename path set comp = get_current_comp() @@ -59,23 +59,24 @@ class FusionLoadFBXMesh(load.LoaderPlugin): loader=self.__class__.__name__, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update path""" tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["ImportFile"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", repre_entity["id"]) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py index ad737aabed..7c70b54e48 
100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_sequence.py @@ -1,7 +1,6 @@ import contextlib import ayon_core.pipeline.load as load -from ayon_core.pipeline.load import get_representation_context from ayon_core.hosts.fusion.api import ( imprint_container, get_current_comp, @@ -130,15 +129,15 @@ def loader_shift(loader, frame, relative=True): class FusionLoadSequence(load.LoaderPlugin): """Load image sequence into Fusion""" - families = [ + product_types = { "imagesequence", "review", "render", "plate", "image", "online", - ] - representations = ["*"] + } + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -149,9 +148,9 @@ class FusionLoadSequence(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context["asset"]["name"] + namespace = context["folder"]["name"] # Use the first file for now path = self.filepath_from_context(context) @@ -175,10 +174,10 @@ class FusionLoadSequence(load.LoaderPlugin): loader=self.__class__.__name__, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Fusion automatically tries to reset some variables when changing @@ -224,7 +223,7 @@ class FusionLoadSequence(load.LoaderPlugin): assert tool.ID == "Loader", "Must be Loader" comp = tool.Comp() - context = get_representation_context(representation) + repre_entity = context["representation"] path = self.filepath_from_context(context) # Get start frame from version data @@ -255,7 +254,7 @@ class FusionLoadSequence(load.LoaderPlugin): ) # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", repre_entity["id"]) def remove(self, container): tool = container["_tool"] @@ -265,17 +264,17 @@ class FusionLoadSequence(load.LoaderPlugin): with comp_lock_and_undo_chunk(comp, "Remove Loader"): tool.Delete() - def _get_start(self, version_doc, tool): + def _get_start(self, version_entity, tool): """Return real start frame of published files (incl. 
handles)""" - data = version_doc["data"] + attributes = version_entity["attrib"] # Get start frame directly with handle if it's in data - start = data.get("frameStartHandle") + start = attributes.get("frameStartHandle") if start is not None: return start # Get frame start without handles - start = data.get("frameStart") + start = attributes.get("frameStart") if start is None: self.log.warning( "Missing start frame for version " @@ -285,7 +284,7 @@ class FusionLoadSequence(load.LoaderPlugin): return 0 # Use `handleStart` if the data is available - handle_start = data.get("handleStart") + handle_start = attributes.get("handleStart") if handle_start: start -= handle_start diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py index de56d040ac..309b0c094c 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_usd.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_usd.py @@ -16,8 +16,8 @@ class FusionLoadUSD(load.LoaderPlugin): Support for USD was added since Fusion 18.5 """ - families = ["*"] - representations = ["*"] + product_types = {"*"} + representations = {"*"} extensions = {"usd", "usda", "usdz"} label = "Load USD" @@ -40,9 +40,9 @@ class FusionLoadUSD(load.LoaderPlugin): cls.enabled = is_usd_supported def load(self, context, name, namespace, data): - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context['asset']['name'] + namespace = context["folder"]["name"] # Create the Loader with the filename path set comp = get_current_comp() @@ -60,22 +60,23 @@ class FusionLoadUSD(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): tool = container["_tool"] assert tool.ID == self.tool_type, f"Must be {self.tool_type}" comp = tool.Comp() - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) with comp_lock_and_undo_chunk(comp, "Update tool"): tool["Filename"] = path # Update the imprinted representation - tool.SetData("avalon.representation", str(representation["_id"])) + tool.SetData("avalon.representation", repre_entity["id"]) def remove(self, container): tool = container["_tool"] diff --git a/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py b/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py index d50fded502..818fbcb187 100644 --- a/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/fusion/plugins/load/load_workfile.py @@ -14,8 +14,8 @@ from ayon_core.hosts.fusion.api import ( class FusionLoadWorkfile(load.LoaderPlugin): """Load the content of a workfile into Fusion""" - families = ["workfile"] - representations = ["*"] + product_types = {"workfile"} + representations = {"*"} extensions = {"comp"} label = "Load Workfile" diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py index 2cbd4d82f4..921c282877 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_instances.py @@ -25,8 +25,8 @@ class CollectInstanceData(pyblish.api.InstancePlugin): 
frame_range_source = creator_attributes.get("frame_range_source") instance.data["frame_range_source"] = frame_range_source - # get asset frame ranges to all instances - # render product type instances `asset_db` render target + # get folder frame ranges to all instances + # render product type instances `current_folder` render target start = context.data["frameStart"] end = context.data["frameEnd"] handle_start = context.data["handleStart"] @@ -57,6 +57,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin): start_with_handle = comp_start end_with_handle = comp_end + if frame_range_source == "custom_range": + start = int(instance.data["custom_frameStart"]) + end = int(instance.data["custom_frameEnd"]) + handle_start = int(instance.data["custom_handleStart"]) + handle_end = int(instance.data["custom_handleEnd"]) + start_with_handle = start - handle_start + end_with_handle = end + handle_end + frame = instance.data["creator_attributes"].get("frame") # explicitly publishing only single frame if frame is not None: diff --git a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py index 36102d02cb..7a2844d5db 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/collect_render.py @@ -37,14 +37,13 @@ class CollectFusionRender( aspect_x = comp_frame_format_prefs["AspectX"] aspect_y = comp_frame_format_prefs["AspectY"] - instances = [] - instances_to_remove = [] current_file = context.data["currentFile"] version = context.data["version"] project_entity = context.data["projectEntity"] + instances = [] for inst in context: if not inst.data.get("active", True): continue @@ -91,7 +90,10 @@ class CollectFusionRender( frameStep=1, fps=comp_frame_format_prefs.get("Rate"), app_version=comp.GetApp().Version, - publish_attributes=inst.data.get("publish_attributes", {}) + publish_attributes=inst.data.get("publish_attributes", {}), + + # The source instance this render instance replaces + source_instance=inst ) render_target = inst.data["creator_attributes"]["render_target"] @@ -114,13 +116,7 @@ class CollectFusionRender( # to skip ExtractReview locally instance.families.remove("review") - # add new instance to the list and remove the original - # instance since it is not needed anymore instances.append(instance) - instances_to_remove.append(inst) - - for instance in instances_to_remove: - context.remove(instance) return instances diff --git a/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py b/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py index 23a8cdb8a0..39fa20cfc0 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/extract_render_local.py @@ -70,10 +70,10 @@ class FusionRenderLocal( # Log render status self.log.info( - "Rendered '{nm}' for asset '{ast}' under the task '{tsk}'".format( - nm=instance.data["name"], - ast=instance.data["folderPath"], - tsk=instance.data["task"], + "Rendered '{}' for folder '{}' under the task '{}'".format( + instance.data["name"], + instance.data["folderPath"], + instance.data["task"], ) ) diff --git a/client/ayon_core/hosts/fusion/plugins/publish/validate_saver_resolution.py b/client/ayon_core/hosts/fusion/plugins/publish/validate_saver_resolution.py index af8d4f41fa..17992b123c 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/validate_saver_resolution.py +++ 
b/client/ayon_core/hosts/fusion/plugins/publish/validate_saver_resolution.py @@ -11,10 +11,10 @@ from ayon_core.hosts.fusion.api import comp_lock_and_undo_chunk class ValidateSaverResolution( pyblish.api.InstancePlugin, OptionalPyblishPluginMixin ): - """Validate that the saver input resolution matches the asset resolution""" + """Validate that the saver input resolution matches the folder resolution""" order = pyblish.api.ValidatorOrder - label = "Validate Asset Resolution" + label = "Validate Folder Resolution" families = ["render", "image"] hosts = ["fusion"] optional = True @@ -29,7 +29,7 @@ class ValidateSaverResolution( if resolution != expected_resolution: raise PublishValidationError( "The input's resolution does not match " - "the asset's resolution {}x{}.\n\n" + "the folder's resolution {}x{}.\n\n" "The input's resolution is {}x{}.".format( expected_resolution[0], expected_resolution[1], resolution[0], resolution[1] @@ -55,8 +55,8 @@ class ValidateSaverResolution( @classmethod def get_expected_resolution(cls, instance): - data = instance.data["assetEntity"]["data"] - return data["resolutionWidth"], data["resolutionHeight"] + attributes = instance.data["folderEntity"]["attrib"] + return attributes["resolutionWidth"], attributes["resolutionHeight"] @classmethod def get_tool_resolution(cls, tool, frame): diff --git a/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py b/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py index 939ddbd117..bcd9abd8b0 100644 --- a/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/hosts/fusion/plugins/publish/validate_unique_subsets.py @@ -10,7 +10,7 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin): """Ensure all instances have a unique product name""" order = pyblish.api.ValidatorOrder - label = "Validate Unique Subsets" + label = "Validate Unique Products" families = ["render", "image"] hosts = ["fusion"] actions = [SelectInvalidAction] @@ -27,7 +27,7 @@ class ValidateUniqueSubsets(pyblish.api.ContextPlugin): instance ) - # Find which asset + subset combination has more than one instance + # Find which folder + subset combination has more than one instance # Those are considered invalid because they'd integrate to the same # destination. 
invalid = [] diff --git a/client/ayon_core/hosts/harmony/__init__.py b/client/ayon_core/hosts/harmony/__init__.py index 9177eaa285..6454d6f9d7 100644 --- a/client/ayon_core/hosts/harmony/__init__.py +++ b/client/ayon_core/hosts/harmony/__init__.py @@ -1,10 +1,12 @@ from .addon import ( - HARMONY_HOST_DIR, + HARMONY_ADDON_ROOT, HarmonyAddon, + get_launch_script_path, ) __all__ = ( - "HARMONY_HOST_DIR", + "HARMONY_ADDON_ROOT", "HarmonyAddon", + "get_launch_script_path", ) diff --git a/client/ayon_core/hosts/harmony/addon.py b/client/ayon_core/hosts/harmony/addon.py index 476d569415..1915a7eb6f 100644 --- a/client/ayon_core/hosts/harmony/addon.py +++ b/client/ayon_core/hosts/harmony/addon.py @@ -1,7 +1,7 @@ import os from ayon_core.addon import AYONAddon, IHostAddon -HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) +HARMONY_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) class HarmonyAddon(AYONAddon, IHostAddon): @@ -11,10 +11,23 @@ class HarmonyAddon(AYONAddon, IHostAddon): def add_implementation_envs(self, env, _app): """Modify environments to contain all required for implementation.""" openharmony_path = os.path.join( - HARMONY_HOST_DIR, "vendor", "OpenHarmony" + HARMONY_ADDON_ROOT, "vendor", "OpenHarmony" ) # TODO check if is already set? What to do if is already set? env["LIB_OPENHARMONY_PATH"] = openharmony_path def get_workfile_extensions(self): return [".zip"] + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(HARMONY_ADDON_ROOT, "hooks") + ] + + +def get_launch_script_path(): + return os.path.join( + HARMONY_ADDON_ROOT, "api", "launch_script.py" + ) diff --git a/client/ayon_core/hosts/harmony/api/README.md b/client/ayon_core/hosts/harmony/api/README.md index 5b90d45f98..b8d1dbc100 100644 --- a/client/ayon_core/hosts/harmony/api/README.md +++ b/client/ayon_core/hosts/harmony/api/README.md @@ -204,7 +204,7 @@ class CreateComposite(harmony.Creator): name = "compositeDefault" label = "Composite" - product_type = "mindbender.template" + product_type = "template" def __init__(self, *args, **kwargs): super(CreateComposite, self).__init__(*args, **kwargs) @@ -221,7 +221,7 @@ class CreateRender(harmony.Creator): name = "writeDefault" label = "Write" - product_type = "mindbender.imagesequence" + product_type = "render" node_type = "WRITE" def __init__(self, *args, **kwargs): @@ -304,7 +304,7 @@ class ExtractImage(pyblish.api.InstancePlugin): label = "Extract Image Sequence" order = pyblish.api.ExtractorOrder hosts = ["harmony"] - families = ["mindbender.imagesequence"] + families = ["render"] def process(self, instance): project_path = harmony.send( @@ -582,8 +582,16 @@ class ImageSequenceLoader(load.LoaderPlugin): """Load images Stores the imported asset in a container named after the asset. 
""" - families = ["mindbender.imagesequence"] - representations = ["*"] + product_types = { + "shot", + "render", + "image", + "plate", + "reference", + "review", + } + representations = {"*"} + extensions = {"jpeg", "png", "jpg"} def load(self, context, name=None, namespace=None, data=None): files = [] @@ -597,7 +605,7 @@ class ImageSequenceLoader(load.LoaderPlugin): read_node = harmony.send( { "function": copy_files + import_files, - "args": ["Top", files, context["version"]["data"]["subset"], 1] + "args": ["Top", files, context["product"]["name"], 1] } )["result"] @@ -611,11 +619,12 @@ class ImageSequenceLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): node = container.pop("node") + repre_entity = context["representation"] project_name = get_current_project_name() - version = get_version_by_id(project_name, representation["parent"]) + version = get_version_by_id(project_name, repre_entity["versionId"]) files = [] for f in version["data"]["files"]: files.append( @@ -632,7 +641,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": repre_entity["id"]} ) def remove(self, container): @@ -648,8 +657,8 @@ class ImageSequenceLoader(load.LoaderPlugin): {"function": func, "args": [node]} ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) ``` ## Resources diff --git a/client/ayon_core/hosts/harmony/api/__init__.py b/client/ayon_core/hosts/harmony/api/__init__.py index d890215f63..bdcc43ea2f 100644 --- a/client/ayon_core/hosts/harmony/api/__init__.py +++ b/client/ayon_core/hosts/harmony/api/__init__.py @@ -11,7 +11,7 @@ from .pipeline import ( select_instance, containerise, set_scene_settings, - get_asset_settings, + get_current_context_settings, ensure_scene_settings, check_inventory, application_launch, @@ -55,7 +55,7 @@ __all__ = [ "select_instance", "containerise", "set_scene_settings", - "get_asset_settings", + "get_current_context_settings", "ensure_scene_settings", "check_inventory", "application_launch", diff --git a/client/ayon_core/hosts/harmony/api/launch_script.py b/client/ayon_core/hosts/harmony/api/launch_script.py new file mode 100644 index 0000000000..3c809e210f --- /dev/null +++ b/client/ayon_core/hosts/harmony/api/launch_script.py @@ -0,0 +1,93 @@ +"""Script wraps launch mechanism of Harmony implementations. + +Arguments passed to the script are passed to launch function in host +implementation. In all cases requires host app executable and may contain +workfile or others. 
+""" + +import os +import sys + +from ayon_core.hosts.harmony.api.lib import main as host_main + +# Get current file to locate start point of sys.argv +CURRENT_FILE = os.path.abspath(__file__) + + +def show_error_messagebox(title, message, detail_message=None): + """Function will show message and process ends after closing it.""" + from qtpy import QtWidgets, QtCore + from ayon_core import style + + app = QtWidgets.QApplication([]) + app.setStyleSheet(style.load_stylesheet()) + + msgbox = QtWidgets.QMessageBox() + msgbox.setWindowTitle(title) + msgbox.setText(message) + + if detail_message: + msgbox.setDetailedText(detail_message) + + msgbox.setWindowModality(QtCore.Qt.ApplicationModal) + msgbox.show() + + sys.exit(app.exec_()) + + +def on_invalid_args(script_not_found): + """Show to user message box saying that something went wrong. + + Tell user that arguments to launch implementation are invalid with + arguments details. + + Args: + script_not_found (bool): Use different message based on this value. + """ + + title = "Invalid arguments" + joined_args = ", ".join("\"{}\"".format(arg) for arg in sys.argv) + if script_not_found: + submsg = "Where couldn't find script path:\n\"{}\"" + else: + submsg = "Expected Host executable after script path:\n\"{}\"" + + message = "BUG: Got invalid arguments so can't launch Host application." + detail_message = "Process was launched with arguments:\n{}\n\n{}".format( + joined_args, + submsg.format(CURRENT_FILE) + ) + + show_error_messagebox(title, message, detail_message) + + +def main(argv): + # Modify current file path to find match in sys.argv which may be different + # on windows (different letter cases and slashes). + modified_current_file = CURRENT_FILE.replace("\\", "/").lower() + + # Create a copy of sys argv + sys_args = list(argv) + after_script_idx = None + # Find script path in sys.argv to know index of argv where host + # executable should be. + for idx, item in enumerate(sys_args): + if item.replace("\\", "/").lower() == modified_current_file: + after_script_idx = idx + 1 + break + + # Validate that there is at least one argument after script path + launch_args = None + if after_script_idx is not None: + launch_args = sys_args[after_script_idx:] + + if launch_args: + # Launch host implementation + host_main(*launch_args) + else: + # Show message box + on_invalid_args(after_script_idx is None) + + +if __name__ == "__main__": + main(sys.argv) diff --git a/client/ayon_core/hosts/harmony/api/lib.py b/client/ayon_core/hosts/harmony/api/lib.py index bc73e19066..f9980cb65e 100644 --- a/client/ayon_core/hosts/harmony/api/lib.py +++ b/client/ayon_core/hosts/harmony/api/lib.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- """Utility functions used for Avalon - Harmony integration.""" +import platform import subprocess import threading import os @@ -14,15 +15,16 @@ import json import signal import time from uuid import uuid4 -from qtpy import QtWidgets, QtCore, QtGui import collections -from .server import Server +from qtpy import QtWidgets, QtCore, QtGui +from ayon_core.lib import is_using_ayon_console from ayon_core.tools.stdout_broker.app import StdOutBroker from ayon_core.tools.utils import host_tools from ayon_core import style -from ayon_core.lib.applications import get_non_python_host_kwargs + +from .server import Server # Setup logging. 
log = logging.getLogger(__name__) @@ -324,7 +326,18 @@ def launch_zip_file(filepath): return print("Launching {}".format(scene_path)) - kwargs = get_non_python_host_kwargs({}, False) + # QUESTION Could we use 'run_detached_process' from 'ayon_core.lib'? + kwargs = {} + if ( + platform.system().lower() == "windows" + and not is_using_ayon_console() + ): + kwargs.update({ + "creationflags": subprocess.CREATE_NO_WINDOW, + "stdout": subprocess.DEVNULL, + "stderr": subprocess.DEVNULL + }) + process = subprocess.Popen( [ProcessContext.application_path, scene_path], **kwargs @@ -555,7 +568,7 @@ def save_scene(): """Save the Harmony scene safely. The built-in (to Avalon) background zip and moving of the Harmony scene - folder, interfers with server/client communication by sending two requests + folder, interferes with server/client communication by sending two requests at the same time. This only happens when sending "scene.saveAll()". This method prevents this double request and safely saves the scene. diff --git a/client/ayon_core/hosts/harmony/api/pipeline.py b/client/ayon_core/hosts/harmony/api/pipeline.py index 863053dddc..1e3ea0ba21 100644 --- a/client/ayon_core/hosts/harmony/api/pipeline.py +++ b/client/ayon_core/hosts/harmony/api/pipeline.py @@ -13,15 +13,15 @@ from ayon_core.pipeline import ( AVALON_CONTAINER_ID, ) from ayon_core.pipeline.load import get_outdated_containers -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity -from ayon_core.hosts.harmony import HARMONY_HOST_DIR +from ayon_core.hosts.harmony import HARMONY_ADDON_ROOT import ayon_core.hosts.harmony.api as harmony log = logging.getLogger("ayon_core.hosts.harmony") -PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(HARMONY_ADDON_ROOT, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -42,24 +42,25 @@ def set_scene_settings(settings): {"function": "PypeHarmony.setSceneSettings", "args": settings}) -def get_asset_settings(): - """Get settings on current asset from database. +def get_current_context_settings(): + """Get settings on current folder from server. Returns: dict: Scene data. 
""" - asset_doc = get_current_project_asset() - asset_data = asset_doc["data"] - fps = asset_data.get("fps") - frame_start = asset_data.get("frameStart") - frame_end = asset_data.get("frameEnd") - handle_start = asset_data.get("handleStart") - handle_end = asset_data.get("handleEnd") - resolution_width = asset_data.get("resolutionWidth") - resolution_height = asset_data.get("resolutionHeight") - entity_type = asset_data.get("entityType") + folder_entity = get_current_folder_entity() + folder_attributes = folder_entity["attrib"] + + fps = folder_attributes.get("fps") + frame_start = folder_attributes.get("frameStart") + frame_end = folder_attributes.get("frameEnd") + handle_start = folder_attributes.get("handleStart") + handle_end = folder_attributes.get("handleEnd") + resolution_width = folder_attributes.get("resolutionWidth") + resolution_height = folder_attributes.get("resolutionHeight") + entity_type = folder_attributes.get("entityType") scene_data = { "fps": fps, @@ -77,7 +78,7 @@ def get_asset_settings(): def ensure_scene_settings(): """Validate if Harmony scene has valid settings.""" - settings = get_asset_settings() + settings = get_current_context_settings() invalid_settings = [] valid_settings = {} @@ -336,7 +337,7 @@ def containerise(name, "name": name, "namespace": namespace, "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "nodes": nodes } diff --git a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py new file mode 100644 index 0000000000..4d38cd09b3 --- /dev/null +++ b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py @@ -0,0 +1,88 @@ +import os +import platform +import subprocess + +from ayon_core.lib import ( + get_ayon_launcher_args, + is_using_ayon_console, +) +from ayon_applications import PreLaunchHook, LaunchTypes +from ayon_core.hosts.harmony import get_launch_script_path + + +def get_launch_kwargs(kwargs): + """Explicit setting of kwargs for Popen for Harmony. + + Expected behavior + - ayon_console opens window with logs + - ayon has stdout/stderr available for capturing + + Args: + kwargs (Union[dict, None]): Current kwargs or None. + + """ + if kwargs is None: + kwargs = {} + + if platform.system().lower() != "windows": + return kwargs + + if is_using_ayon_console(): + kwargs.update({ + "creationflags": subprocess.CREATE_NEW_CONSOLE + }) + else: + kwargs.update({ + "creationflags": subprocess.CREATE_NO_WINDOW, + "stdout": subprocess.DEVNULL, + "stderr": subprocess.DEVNULL + }) + return kwargs + + +class HarmonyPrelaunchHook(PreLaunchHook): + """Launch arguments preparation. + + Hook add python executable and script path to Harmony implementation + before Harmony executable and add last workfile path to launch arguments. + + Existence of last workfile is checked. If workfile does not exists tries + to copy templated workfile from predefined path. + """ + app_groups = {"harmony"} + + order = 20 + launch_types = {LaunchTypes.local} + + def execute(self): + # Pop executable + executable_path = self.launch_context.launch_args.pop(0) + + # Pop rest of launch arguments - There should not be other arguments! 
diff --git a/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py
new file mode 100644
index 0000000000..4d38cd09b3
--- /dev/null
+++ b/client/ayon_core/hosts/harmony/hooks/pre_launch_args.py
@@ -0,0 +1,88 @@
+import os
+import platform
+import subprocess
+
+from ayon_core.lib import (
+    get_ayon_launcher_args,
+    is_using_ayon_console,
+)
+from ayon_applications import PreLaunchHook, LaunchTypes
+from ayon_core.hosts.harmony import get_launch_script_path
+
+
+def get_launch_kwargs(kwargs):
+    """Explicit setting of kwargs for Popen for Harmony.
+
+    Expected behavior
+    - ayon_console opens window with logs
+    - ayon has stdout/stderr available for capturing
+
+    Args:
+        kwargs (Union[dict, None]): Current kwargs or None.
+
+    """
+    if kwargs is None:
+        kwargs = {}
+
+    if platform.system().lower() != "windows":
+        return kwargs
+
+    if is_using_ayon_console():
+        kwargs.update({
+            "creationflags": subprocess.CREATE_NEW_CONSOLE
+        })
+    else:
+        kwargs.update({
+            "creationflags": subprocess.CREATE_NO_WINDOW,
+            "stdout": subprocess.DEVNULL,
+            "stderr": subprocess.DEVNULL
+        })
+    return kwargs
+
+
+class HarmonyPrelaunchHook(PreLaunchHook):
+    """Launch arguments preparation.
+
+    Hook adds the python executable and the script path of the Harmony
+    implementation before the Harmony executable, and adds the last
+    workfile path to the launch arguments.
+
+    Existence of the last workfile is checked. If the workfile does not
+    exist, it tries to copy a templated workfile from a predefined path.
+    """
+    app_groups = {"harmony"}
+
+    order = 20
+    launch_types = {LaunchTypes.local}
+
+    def execute(self):
+        # Pop executable
+        executable_path = self.launch_context.launch_args.pop(0)
+
+        # Pop rest of launch arguments - There should not be other arguments!
+        remainders = []
+        while self.launch_context.launch_args:
+            remainders.append(self.launch_context.launch_args.pop(0))
+
+        script_path = get_launch_script_path()
+
+        new_launch_args = get_ayon_launcher_args(
+            "run", script_path, executable_path
+        )
+        # Add workfile path if exists
+        workfile_path = self.data["last_workfile_path"]
+        if (
+            self.data.get("start_last_workfile")
+            and workfile_path
+            and os.path.exists(workfile_path)
+        ):
+            new_launch_args.append(workfile_path)
+
+        # Append as whole list as these arguments should not be separated
+        self.launch_context.launch_args.append(new_launch_args)
+
+        if remainders:
+            self.launch_context.launch_args.extend(remainders)
+
+        self.launch_context.kwargs = get_launch_kwargs(
+            self.launch_context.kwargs
+        )
diff --git a/client/ayon_core/hosts/harmony/js/PypeHarmony.js b/client/ayon_core/hosts/harmony/js/PypeHarmony.js
index 41c8dc56ce..cf6a5e3763 100644
--- a/client/ayon_core/hosts/harmony/js/PypeHarmony.js
+++ b/client/ayon_core/hosts/harmony/js/PypeHarmony.js
@@ -34,7 +34,7 @@ PypeHarmony.message = function(message) {


 /**
- * Set scene setting based on shot/asset settngs.
+ * Set scene settings based on folder settings.
  * @function
  * @param {obj} settings Scene settings.
  */
diff --git a/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js b/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js
index ebbd7163f9..a2ca5f9a99 100644
--- a/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js
+++ b/client/ayon_core/hosts/harmony/js/loaders/ImageSequenceLoader.js
@@ -87,7 +87,7 @@ ImageSequenceLoader.getUniqueColumnName = function(columnPrefix) {
  * // Arguments are in following order:
  * var args = [
  *     files, // Files in file sequences.
- *     asset, // Asset name.
+ *     folderName, // Folder name.
  *     productName, // Product name.
  *     startFrame, // Sequence starting frame.
  *     groupId // Unique group ID (uuid4).
@@ -105,7 +105,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
     var doc = $.scn;
     var files = args[0];
-    var asset = args[1];
+    var folderName = args[1];
     var productName = args[2];
     var startFrame = args[3];
     var groupId = args[4];
@@ -124,7 +124,7 @@ ImageSequenceLoader.prototype.importFiles = function(args) {
     var num = 0;
     var name = '';
     do {
-        name = asset + '_' + (num++) + '_' + productName;
+        name = folderName + '_' + (num++) + '_' + productName;
     } while (currentGroup.getNodeByName(name) != null);

     extension = filename.substr(pos+1).toLowerCase();
diff --git a/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js b/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js
index 78167fcb39..c29e12c43b 100644
--- a/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js
+++ b/client/ayon_core/hosts/harmony/js/loaders/TemplateLoader.js
@@ -30,7 +30,7 @@ var TemplateLoader = function() {};
  * // arguments are in following order:
  * var args = [
  *     templatePath, // Path to tpl file.
- *     assetName, // Asset name.
+ *     folderName, // Folder name.
  *     productName, // Product name.
* groupId // unique ID (uuid4) * ]; @@ -38,7 +38,7 @@ var TemplateLoader = function() {}; TemplateLoader.prototype.loadContainer = function(args) { var doc = $.scn; var templatePath = args[0]; - var assetName = args[1]; + var folderName = args[1]; var productName = args[2]; var groupId = args[3]; @@ -62,7 +62,7 @@ TemplateLoader.prototype.loadContainer = function(args) { var num = 0; var containerGroupName = ''; do { - containerGroupName = assetName + '_' + (num++) + '_' + productName; + containerGroupName = folderName + '_' + (num++) + '_' + productName; } while (currentGroup.getNodeByName(containerGroupName) != null); // import the template diff --git a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py index 16c403de6a..3039d56ead 100644 --- a/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py +++ b/client/ayon_core/hosts/harmony/plugins/create/create_farm_render.py @@ -21,12 +21,12 @@ class CreateFarmRender(plugin.Creator): path = "render/{0}/{0}.".format(node.split("/")[-1]) harmony.send( { - "function": f"PypeHarmony.Creators.CreateRender.create", + "function": "PypeHarmony.Creators.CreateRender.create", "args": [node, path] }) harmony.send( { - "function": f"PypeHarmony.color", + "function": "PypeHarmony.color", "args": [[0.9, 0.75, 0.3, 1.0]] } ) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py index 1017d6c2a2..d23f3ed034 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_audio.py @@ -35,17 +35,17 @@ function %s(args) class ImportAudioLoader(load.LoaderPlugin): """Import audio.""" - families = ["shot", "audio"] - representations = ["wav"] + product_types = {"shot", "audio"} + representations = {"wav"} label = "Import Audio" def load(self, context, name=None, namespace=None, data=None): wav_file = get_representation_path(context["representation"]) harmony.send( - {"function": func, "args": [context["subset"]["name"], wav_file]} + {"function": func, "args": [context["product"]["name"], wav_file]} ) - product_name = context["subset"]["name"] + product_name = context["product"]["name"] return harmony.containerise( product_name, @@ -55,7 +55,7 @@ class ImportAudioLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): pass def remove(self, container): diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_background.py b/client/ayon_core/hosts/harmony/plugins/load/load_background.py index cc664bb24f..dad6ac2f22 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_background.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_background.py @@ -233,8 +233,8 @@ class BackgroundLoader(load.LoaderPlugin): """Load images Stores the imported asset in a container named after the asset. 
""" - families = ["background"] - representations = ["json"] + product_types = {"background"} + representations = {"json"} def load(self, context, name=None, namespace=None, data=None): @@ -254,7 +254,7 @@ class BackgroundLoader(load.LoaderPlugin): bg_folder = os.path.dirname(path) - product_name = context["subset"]["name"] + product_name = context["product"]["name"] # read_node_name += "_{}".format(uuid.uuid4()) container_nodes = [] @@ -280,8 +280,9 @@ class BackgroundLoader(load.LoaderPlugin): nodes=container_nodes ) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_entity = context["representation"] + path = get_representation_path(repre_entity) with open(path) as json_file: data = json.load(json_file) @@ -301,7 +302,7 @@ class BackgroundLoader(load.LoaderPlugin): print(container) - is_latest = is_representation_from_latest(representation) + is_latest = is_representation_from_latest(repre_entity) for layer in sorted(layers): file_to_import = [ os.path.join(bg_folder, layer).replace("\\", "/") @@ -351,8 +352,11 @@ class BackgroundLoader(load.LoaderPlugin): harmony.send({"function": func, "args": [node, "red"]}) harmony.imprint( - container['name'], {"representation": str(representation["_id"]), - "nodes": container['nodes']} + container['name'], + { + "representation": repre_entity["id"], + "nodes": container["nodes"] + } ) def remove(self, container): @@ -369,5 +373,5 @@ class BackgroundLoader(load.LoaderPlugin): ) harmony.imprint(container['name'], {}, remove=True) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py index db67ff1123..f81ebca9af 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_imagesequence.py @@ -20,8 +20,15 @@ class ImageSequenceLoader(load.LoaderPlugin): Stores the imported asset in a container named after the asset. """ - families = ["shot", "render", "image", "plate", "reference", "review"] - representations = ["*"] + product_types = { + "shot", + "render", + "image", + "plate", + "reference", + "review", + } + representations = {"*"} extensions = {"jpeg", "png", "jpg"} def load(self, context, name=None, namespace=None, data=None): @@ -46,8 +53,8 @@ class ImageSequenceLoader(load.LoaderPlugin): else: files.append(fname.parent.joinpath(remainder[0]).as_posix()) - asset = context["asset"]["name"] - product_name = context["subset"]["name"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] group_id = str(uuid.uuid4()) read_node = harmony.send( @@ -55,7 +62,7 @@ class ImageSequenceLoader(load.LoaderPlugin): "function": f"PypeHarmony.Loaders.{self_name}.importFiles", # noqa: E501 "args": [ files, - asset, + folder_name, product_name, 1, group_id @@ -64,7 +71,7 @@ class ImageSequenceLoader(load.LoaderPlugin): )["result"] return harmony.containerise( - f"{asset}_{product_name}", + f"{folder_name}_{product_name}", namespace, read_node, context, @@ -72,18 +79,19 @@ class ImageSequenceLoader(load.LoaderPlugin): nodes=[read_node] ) - def update(self, container, representation): + def update(self, container, context): """Update loaded containers. Args: container (dict): Container data. 
- representation (dict): Representation data. + context (dict): Representation context data. """ self_name = self.__class__.__name__ node = container.get("nodes").pop() - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) collections, remainder = clique.assemble( os.listdir(os.path.dirname(path)) ) @@ -110,7 +118,7 @@ class ImageSequenceLoader(load.LoaderPlugin): ) # Colour node. - if is_representation_from_latest(representation): + if is_representation_from_latest(repre_entity): harmony.send( { "function": "PypeHarmony.setColor", @@ -124,7 +132,7 @@ class ImageSequenceLoader(load.LoaderPlugin): }) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": repre_entity["id"]} ) def remove(self, container): @@ -140,6 +148,6 @@ class ImageSequenceLoader(load.LoaderPlugin): ) harmony.imprint(node, {}, remove=True) - def switch(self, container, representation): + def switch(self, container, context): """Switch loaded representations.""" - self.update(container, representation) + self.update(container, context) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py index 1794ffda5e..24f4b4e8d4 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_palette.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_palette.py @@ -11,8 +11,8 @@ import ayon_core.hosts.harmony.api as harmony class ImportPaletteLoader(load.LoaderPlugin): """Import palettes.""" - families = ["palette", "harmony.palette"] - representations = ["plt"] + product_types = {"palette", "harmony.palette"} + representations = {"plt"} label = "Import Palette" def load(self, context, name=None, namespace=None, data=None): @@ -26,15 +26,16 @@ class ImportPaletteLoader(load.LoaderPlugin): self.__class__.__name__ ) - def load_palette(self, representation): - product_name = representation["context"]["subset"] + def load_palette(self, context): + product_name = context["product"]["name"] + repre_entity = context["representation"] name = product_name.replace("palette", "") # Overwrite palette on disk. 
scene_path = harmony.send( {"function": "scene.currentProjectPath"} )["result"] - src = get_representation_path(representation) + src = get_representation_path(repre_entity) dst = os.path.join( scene_path, "palette-library", @@ -59,13 +60,14 @@ class ImportPaletteLoader(load.LoaderPlugin): def remove(self, container): harmony.remove(container["name"]) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): self.remove(container) - name = self.load_palette(representation) + name = self.load_palette(context) - container["representation"] = str(representation["_id"]) + repre_entity = context["representation"] + container["representation"] = repre_entity["id"] container["name"] = name harmony.imprint(name, container) diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template.py b/client/ayon_core/hosts/harmony/plugins/load/load_template.py index d26f148c09..96dadb0375 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template.py @@ -23,8 +23,8 @@ class TemplateLoader(load.LoaderPlugin): """ - families = ["template", "workfile"] - representations = ["*"] + product_types = {"template", "workfile"} + representations = {"*"} label = "Load Template" icon = "gift" @@ -52,8 +52,8 @@ class TemplateLoader(load.LoaderPlugin): { "function": f"PypeHarmony.Loaders.{self_name}.loadContainer", "args": [template_path, - context["asset"]["name"], - context["subset"]["name"], + context["folder"]["name"], + context["product"]["name"], group_id] } )["result"] @@ -70,19 +70,20 @@ class TemplateLoader(load.LoaderPlugin): self_name ) - def update(self, container, representation): + def update(self, container, context): """Update loaded containers. Args: container (dict): Container data. - representation (dict): Representation data. + context (dict): Representation context data. 
""" node_name = container["name"] node = harmony.find_node_by_name(node_name, "GROUP") self_name = self.__class__.__name__ - if is_representation_from_latest(representation): + repre_entity = context["representation"] + if is_representation_from_latest(repre_entity): self._set_green(node) else: self._set_red(node) @@ -110,7 +111,7 @@ class TemplateLoader(load.LoaderPlugin): None, container["data"]) harmony.imprint( - node, {"representation": str(representation["_id"])} + node, {"representation": repre_entity["id"]} ) def remove(self, container): @@ -125,9 +126,9 @@ class TemplateLoader(load.LoaderPlugin): {"function": "PypeHarmony.deleteNode", "args": [node]} ) - def switch(self, container, representation): + def switch(self, container, context): """Switch representation containers.""" - self.update(container, representation) + self.update(container, context) def _set_green(self, node): """Set node color to green `rgba(0, 255, 0, 255)`.""" diff --git a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py index 65f4fe6d0a..fa5ffe5105 100644 --- a/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py +++ b/client/ayon_core/hosts/harmony/plugins/load/load_template_workfile.py @@ -13,8 +13,8 @@ import ayon_core.hosts.harmony.api as harmony class ImportTemplateLoader(load.LoaderPlugin): """Import templates.""" - families = ["harmony.template", "workfile"] - representations = ["*"] + product_types = {"harmony.template", "workfile"} + representations = {"*"} label = "Import Template" def load(self, context, name=None, namespace=None, data=None): @@ -40,7 +40,7 @@ class ImportTemplateLoader(load.LoaderPlugin): shutil.rmtree(temp_dir) - product_name = context["subset"]["name"] + product_name = context["product"]["name"] return harmony.containerise( product_name, @@ -50,16 +50,16 @@ class ImportTemplateLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): - pass + def update(self, container, context): + pass - def remove(self, container): - pass + def remove(self, container): + pass class ImportWorkfileLoader(ImportTemplateLoader): """Import workfiles.""" - families = ["workfile"] - representations = ["zip"] + product_types = {"workfile"} + representations = {"zip"} label = "Import Workfile" diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py index 40b4107a62..cc959a23b9 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_audio.py @@ -1,8 +1,8 @@ import os -import pyblish.api import pyblish.api + class CollectAudio(pyblish.api.InstancePlugin): """ Collect relative path for audio file to instance. 
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py
index a60e44b69b..bc2ccca1be 100644
--- a/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py
+++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_scene.py
@@ -17,7 +17,7 @@ class CollectScene(pyblish.api.ContextPlugin):
         """Plugin entry point."""
         result = harmony.send(
             {
-                f"function": "PypeHarmony.getSceneSettings",
+                "function": "PypeHarmony.getSceneSettings",
                 "args": []}
         )["result"]

@@ -62,7 +62,7 @@ class CollectScene(pyblish.api.ContextPlugin):

         result = harmony.send(
             {
-                f"function": "PypeHarmony.getVersion",
+                "function": "PypeHarmony.getVersion",
                 "args": []}
         )["result"]
         context.data["harmonyVersion"] = "{}.{}".format(result[0], result[1])
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py b/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py
index b1010cfb57..488a7c4c71 100644
--- a/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py
+++ b/client/ayon_core/hosts/harmony/plugins/publish/collect_workfile.py
@@ -17,10 +17,15 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
         """Plugin entry point."""
         product_type = "workfile"
         basename = os.path.basename(context.data["currentFile"])
+        task_entity = context.data["taskEntity"]
+        task_name = task_type = None
+        if task_entity:
+            task_name = task_entity["name"]
+            task_type = task_entity["taskType"]
         product_name = get_product_name(
             context.data["projectName"],
-            context.data["assetEntity"],
-            context.data["task"],
+            task_name,
+            task_type,
             context.data["hostName"],
             product_type,
             "",
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml
index 67ad7e2d21..8c2b523e29 100644
--- a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml
+++ b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_instances.xml
@@ -1,25 +1,25 @@
-Subset context
+Product context

 ## Invalid product context

-Asset name found '{found}' in products, expected '{expected}'.
+Folder path found '{found}' in products, expected '{expected}'.

 ### How to repair?

-You can fix this with `Repair` button on the right. This will use '{expected}' asset name and overwrite '{found}' asset name in scene metadata.
+You can fix this with the `Repair` button on the right. This will use the '{expected}' folder path and overwrite the '{found}' folder path in scene metadata.

 After that restart `Publish` with a `Reload button`.

-If this is unwanted, close workfile and open again, that way different asset value would be used for context information.
+If this is unwanted, close the workfile and open it again; that way a different folder value will be used for context information.

 ### __Detailed Info__ (optional)

 This might happen if you are reuse old workfile and open it in different context.
-(Eg. you created product "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing product for "Robot" asset stayed in the workfile.)
+(Eg. you created product "renderCompositingDefault" from folder "Robot" in "your_project_Robot_compositing.aep"; now you opened this workfile in the context "Sloth" but the existing product for the "Robot" folder stayed in the workfile.)
\ No newline at end of file
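These help XML files pair with `PublishXmlValidationError`: the publisher resolves the XML for the failing plugin by naming convention and substitutes placeholders such as `{found}` and `{expected}` from the `formatting_data` mapping passed to the exception (see `validate_instances.py` further below). A minimal sketch of the validator side, with an illustrative class name and check:

```python
import pyblish.api
from ayon_core.pipeline import get_current_folder_path
from ayon_core.pipeline.publish import PublishXmlValidationError


class ValidateFolderMatchExample(pyblish.api.InstancePlugin):
    """Hypothetical validator showing the help-XML error pattern."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Folder Match (illustrative)"
    hosts = ["harmony"]

    def process(self, instance):
        found = instance.data["folderPath"]
        expected = get_current_folder_path()
        if found != expected:
            # '{found}' and '{expected}' in the matching help XML are
            # replaced with these values when the error is displayed.
            raise PublishXmlValidationError(
                self,
                "Instance folder does not match the current folder",
                formatting_data={"found": found, "expected": expected},
            )
```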
\ No newline at end of file
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_scene_settings.xml b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_scene_settings.xml
index 36fa90456e..b645a97cb2 100644
--- a/client/ayon_core/hosts/harmony/plugins/publish/help/validate_scene_settings.xml
+++ b/client/ayon_core/hosts/harmony/plugins/publish/help/validate_scene_settings.xml
@@ -5,18 +5,18 @@

 ## Invalid scene setting found

-One of the settings in a scene doesn't match to asset settings in database.
+One of the settings in the scene doesn't match the folder attributes on the server.

 {invalid_setting_str}

 ### How to repair?

-Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
+Change values for {invalid_keys_str} in the scene OR change them on the folder if they are wrong there.

 ### __Detailed Info__ (optional)

-This error is shown when for example resolution in the scene doesn't match to resolution set on the asset in the database.
+This error is shown when, for example, the resolution in the scene doesn't match the resolution set on the folder on the server.

 Either value in the database or in the scene is wrong.
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py b/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py
index fdba834de6..1200f6266b 100644
--- a/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py
+++ b/client/ayon_core/hosts/harmony/plugins/publish/validate_instances.py
@@ -1,7 +1,7 @@
 import pyblish.api

 import ayon_core.hosts.harmony.api as harmony
-from ayon_core.pipeline import get_current_asset_name
+from ayon_core.pipeline import get_current_folder_path
 from ayon_core.pipeline.publish import (
     ValidateContentsOrder,
     PublishXmlValidationError,
@@ -27,7 +27,7 @@ class ValidateInstanceRepair(pyblish.api.Action):
         # Apply pyblish.logic to get the instances for the plug-in
         instances = pyblish.api.instances_by_plugin(failed, plugin)

-        folder_path = get_current_asset_name()
+        folder_path = get_current_folder_path()
         for instance in instances:
             data = harmony.read(instance.data["setMembers"][0])
             data["folderPath"] = folder_path
@@ -35,7 +35,7 @@

 class ValidateInstance(pyblish.api.InstancePlugin):
-    """Validate the instance asset is the current asset."""
+    """Validate the instance folder is the current folder."""

     label = "Validate Instance"
     hosts = ["harmony"]
@@ -43,17 +43,18 @@
     order = ValidateContentsOrder

     def process(self, instance):
-        instance_asset = instance.data["folderPath"]
-        current_asset = get_current_asset_name()
+        instance_folder_path = instance.data["folderPath"]
+        current_folder_path = get_current_folder_path()
         msg = (
-            "Instance asset is not the same as current asset:"
-            f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
+            "Instance folder is not the same as current folder:"
+            f"\nInstance: {instance_folder_path}"
+            f"\nCurrent: {current_folder_path}"
         )
         formatting_data = {
-            "found": instance_asset,
-            "expected": current_asset
+            "found": instance_folder_path,
+            "expected": current_folder_path
         }
-        if instance_asset != current_asset:
+        if instance_folder_path != current_folder_path:
             raise PublishXmlValidationError(self, msg,
                                             formatting_data=formatting_data)
diff --git a/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py b/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py
index 6d46fbcd33..dc3db3b544 100644 --- a/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py +++ b/client/ayon_core/hosts/harmony/plugins/publish/validate_scene_settings.py @@ -19,12 +19,12 @@ class ValidateSceneSettingsRepair(pyblish.api.Action): def process(self, context, plugin): """Repair action entry point.""" - expected = harmony.get_asset_settings() - asset_settings = _update_frames(dict.copy(expected)) - asset_settings["frameStart"] = 1 - asset_settings["frameEnd"] = asset_settings["frameEnd"] + \ - asset_settings["handleEnd"] - harmony.set_scene_settings(asset_settings) + expected = harmony.get_current_context_settings() + expected_settings = _update_frames(dict.copy(expected)) + expected_settings["frameStart"] = 1 + expected_settings["frameEnd"] = expected_settings["frameEnd"] + \ + expected_settings["handleEnd"] + harmony.set_scene_settings(expected_settings) if not os.path.exists(context.data["scenePath"]): self.log.info("correcting scene name") scene_dir = os.path.dirname(context.data["currentFile"]) @@ -56,10 +56,10 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" - # TODO 'get_asset_settings' could expect asset document as argument - # which is available on 'context.data["assetEntity"]' + # TODO 'get_current_context_settings' could expect folder entity + # as an argument which is available on 'context.data["folderEntity"]' # - the same approach can be used in 'ValidateSceneSettingsRepair' - expected_settings = harmony.get_asset_settings() + expected_settings = harmony.get_current_context_settings() self.log.info("scene settings from DB:{}".format(expected_settings)) expected_settings.pop("entityType") # not useful for the validation @@ -87,8 +87,8 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin): expected_settings.pop('frameStartHandle', None) expected_settings.pop('frameEndHandle', None) - asset_name = instance.context.data['anatomyData']['asset'] - if any(re.search(pattern, asset_name) + folder_name = instance.context.data["folderPath"].rsplit("/", 1)[-1] + if any(re.search(pattern, folder_name) for pattern in self.frame_check_filter): self.log.info("Skipping frames check because of " "task name and pattern {}".format( diff --git a/client/ayon_core/hosts/hiero/api/events.py b/client/ayon_core/hosts/hiero/api/events.py index 0e509747d5..304605e24e 100644 --- a/client/ayon_core/hosts/hiero/api/events.py +++ b/client/ayon_core/hosts/hiero/api/events.py @@ -1,10 +1,12 @@ import os + import hiero.core.events + from ayon_core.lib import Logger, register_event_callback + from .lib import ( sync_avalon_data_to_workfile, launch_workfiles_app, - selection_changed_timeline, before_project_save, ) from .tags import add_tags_to_workfile diff --git a/client/ayon_core/hosts/hiero/api/lib.py b/client/ayon_core/hosts/hiero/api/lib.py index ef0f9edca9..8682ff7780 100644 --- a/client/ayon_core/hosts/hiero/api/lib.py +++ b/client/ayon_core/hosts/hiero/api/lib.py @@ -15,12 +15,12 @@ import shutil import hiero from qtpy import QtWidgets, QtCore +import ayon_api try: from PySide import QtXml except ImportError: from PySide2 import QtXml -from ayon_core.client import get_project from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( Anatomy, @@ -166,7 +166,7 @@ def get_current_track(sequence, name, audio=False): Creates new if none is found. 
Args: - sequence (hiero.core.Sequence): hiero sequene object + sequence (hiero.core.Sequence): hiero sequence object name (str): name of track we want to return audio (bool)[optional]: switch to AudioTrack @@ -248,8 +248,12 @@ def get_track_items( # collect all available active sequence track items if not return_list: sequence = get_current_sequence(name=sequence_name) - # get all available tracks from sequence - tracks = list(sequence.audioTracks()) + list(sequence.videoTracks()) + tracks = [] + if sequence is not None: + # get all available tracks from sequence + tracks.extend(sequence.audioTracks()) + tracks.extend(sequence.videoTracks()) + # loop all tracks for track in tracks: if check_locked and track.isLocked(): @@ -588,7 +592,7 @@ def imprint(track_item, data=None): Examples: data = { - 'asset': 'sq020sh0280', + 'folderPath': '/shots/sq020sh0280', 'productType': 'render', 'productName': 'productMain' } @@ -632,7 +636,9 @@ def sync_avalon_data_to_workfile(): project_name = get_current_project_name() anatomy = Anatomy(project_name) - work_template = anatomy.templates["work"]["path"] + work_template = anatomy.get_template_item( + "work", "default", "path" + ) work_root = anatomy.root_value_for_template(work_template) active_project_root = ( os.path.join(work_root, project_name) @@ -654,17 +660,17 @@ def sync_avalon_data_to_workfile(): project.setProjectRoot(active_project_root) # get project data from avalon db - project_doc = get_project(project_name) - project_data = project_doc["data"] + project_entity = ayon_api.get_project(project_name) + project_attribs = project_entity["attrib"] - log.debug("project_data: {}".format(project_data)) + log.debug("project attributes: {}".format(project_attribs)) # get format and fps property from avalon db on project - width = project_data["resolutionWidth"] - height = project_data["resolutionHeight"] - pixel_aspect = project_data["pixelAspect"] - fps = project_data['fps'] - format_name = project_data['code'] + width = project_attribs["resolutionWidth"] + height = project_attribs["resolutionHeight"] + pixel_aspect = project_attribs["pixelAspect"] + fps = project_attribs["fps"] + format_name = project_entity["code"] # create new format in hiero project format = hiero.core.Format(width, height, pixel_aspect, format_name) @@ -825,7 +831,7 @@ class PublishAction(QtWidgets.QAction): # root_node = hiero.core.nuke.RootNode() # # anatomy = Anatomy(get_current_project_name()) -# work_template = anatomy.templates["work"]["path"] +# work_template = anatomy.get_template_item("work", "default", "path") # root_path = anatomy.root_value_for_template(work_template) # # nuke_script.addNode(root_node) @@ -844,8 +850,8 @@ def create_nuke_workfile_clips(nuke_workfiles, seq=None): [{ 'path': 'P:/Jakub_testy_pipeline/test_v01.nk', 'name': 'test', - 'handleStart': 15, # added asymetrically to handles - 'handleEnd': 10, # added asymetrically to handles + 'handleStart': 15, # added asymmetrically to handles + 'handleEnd': 10, # added asymmetrically to handles "clipIn": 16, "frameStart": 991, "frameEnd": 1023, @@ -1190,7 +1196,7 @@ def get_sequence_pattern_and_padding(file): Return: string: any matching sequence pattern - int: padding of sequnce numbering + int: padding of sequence numbering """ foundall = re.findall( r"(#+)|(%\d+d)|(?<=[^a-zA-Z0-9])(\d+)(?=\.\w+$)", file) diff --git a/client/ayon_core/hosts/hiero/api/menu.py b/client/ayon_core/hosts/hiero/api/menu.py index ba0cbdd120..632b11c7d3 100644 --- a/client/ayon_core/hosts/hiero/api/menu.py +++ 
b/client/ayon_core/hosts/hiero/api/menu.py @@ -11,7 +11,7 @@ from ayon_core.tools.utils import host_tools from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( get_current_project_name, - get_current_asset_name, + get_current_folder_path, get_current_task_name ) @@ -25,7 +25,7 @@ self._change_context_menu = None def get_context_label(): return "{}, {}".format( - get_current_asset_name(), + get_current_folder_path(), get_current_task_name() ) diff --git a/client/ayon_core/hosts/hiero/api/otio/hiero_import.py b/client/ayon_core/hosts/hiero/api/otio/hiero_import.py index 257c434011..29ff7f7325 100644 --- a/client/ayon_core/hosts/hiero/api/otio/hiero_import.py +++ b/client/ayon_core/hosts/hiero/api/otio/hiero_import.py @@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track): if isinstance(track, hiero.core.AudioTrack): kind = 'Audio' - # Gather TrackItems involved in trasition + # Gather TrackItems involved in transition item_in, item_out = get_neighboring_trackitems( otio_item, otio_track, @@ -101,7 +101,7 @@ def apply_transition(otio_track, otio_item, track): if transition_type == 'dissolve': transition_func = getattr( hiero.core.Transition, - 'create{kind}DissolveTransition'.format(kind=kind) + "create{kind}DissolveTransition".format(kind=kind) ) try: @@ -109,7 +109,7 @@ def apply_transition(otio_track, otio_item, track): item_in, item_out, otio_item.in_offset.value, - otio_item.out_offset.value + otio_item.out_offset.value, ) # Catch error raised if transition is bigger than TrackItem source @@ -134,7 +134,7 @@ def apply_transition(otio_track, otio_item, track): transition = transition_func( item_out, - otio_item.out_offset.value + otio_item.out_offset.value, ) elif transition_type == 'fade_out': @@ -183,9 +183,7 @@ def prep_url(url_in): def create_offline_mediasource(otio_clip, path=None): global _otio_old - hiero_rate = hiero.core.TimeBase( - otio_clip.source_range.start_time.rate - ) + hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate) try: legal_media_refs = ( @@ -212,7 +210,7 @@ def create_offline_mediasource(otio_clip, path=None): source_range.start_time.value, source_range.duration.value, hiero_rate, - source_range.start_time.value + source_range.start_time.value, ) return media @@ -385,7 +383,8 @@ def create_trackitem(playhead, track, otio_clip, clip): # Only reverse effect can be applied here if abs(time_scalar) == 1.: trackitem.setPlaybackSpeed( - trackitem.playbackSpeed() * time_scalar) + trackitem.playbackSpeed() * time_scalar + ) elif isinstance(effect, otio.schema.FreezeFrame): # For freeze frame, playback speed must be set after range @@ -397,28 +396,21 @@ def create_trackitem(playhead, track, otio_clip, clip): source_in = source_range.end_time_inclusive().value timeline_in = playhead + source_out - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 else: # Normal playback speed source_in = source_range.start_time.value source_out = source_range.end_time_inclusive().value timeline_in = playhead - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 # Set source and timeline in/out points trackitem.setTimes( timeline_in, timeline_out, source_in, - source_out - + source_out, ) # Apply playback speed for freeze frames @@ -435,7 +427,8 @@ def create_trackitem(playhead, track, otio_clip, clip): def build_sequence( - otio_timeline, project=None, 
sequence=None, track_kind=None): + otio_timeline, project=None, sequence=None, track_kind=None +): if project is None: if sequence: project = sequence.project() @@ -509,10 +502,7 @@ def build_sequence( # Create TrackItem trackitem = create_trackitem( - playhead, - track, - otio_clip, - clip + playhead, track, otio_clip, clip ) # Add markers diff --git a/client/ayon_core/hosts/hiero/api/otio/utils.py b/client/ayon_core/hosts/hiero/api/otio/utils.py index 4c5d46bd51..f7cb58f1e8 100644 --- a/client/ayon_core/hosts/hiero/api/otio/utils.py +++ b/client/ayon_core/hosts/hiero/api/otio/utils.py @@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/hiero/api/pipeline.py b/client/ayon_core/hosts/hiero/api/pipeline.py index a9ba2e4df3..327a4ae29c 100644 --- a/client/ayon_core/hosts/hiero/api/pipeline.py +++ b/client/ayon_core/hosts/hiero/api/pipeline.py @@ -101,7 +101,7 @@ def containerise(track_item, "name": str(name), "namespace": str(namespace), "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], }) if data: diff --git a/client/ayon_core/hosts/hiero/api/plugin.py b/client/ayon_core/hosts/hiero/api/plugin.py index 8f1be97680..1353673b31 100644 --- a/client/ayon_core/hosts/hiero/api/plugin.py +++ b/client/ayon_core/hosts/hiero/api/plugin.py @@ -45,7 +45,7 @@ class CreatorWidget(QtWidgets.QDialog): | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) - self.setWindowTitle(name or "Pype Creator Input") + self.setWindowTitle(name or "AYON Creator Input") self.resize(500, 700) # Where inputs and labels are set @@ -363,7 +363,7 @@ class SequenceLoader(LoaderPlugin): ): pass - def update(self, container, representation): + def update(self, container, context): """Update an existing `container` """ pass @@ -409,8 +409,9 @@ class ClipLoader: "Cannot Load selected data, look into database " "or call your supervisor") - # inject asset data to representation dict - self._get_asset_data() + # inject folder data to representation dict + folder_entity = self.context["folder"] + self.data["folderAttributes"] = folder_entity["attrib"] log.info("__init__ self.data: `{}`".format(pformat(self.data))) log.info("__init__ options: `{}`".format(pformat(options))) @@ -424,7 +425,7 @@ class ClipLoader: self.active_sequence = lib.get_current_sequence(new=True) self.active_sequence.setFramerate( hiero.core.TimeBase.fromString( - str(self.data["assetData"]["fps"]))) + str(self.data["folderAttributes"]["fps"]))) else: self.active_sequence = lib.get_current_sequence() @@ -447,16 +448,16 @@ class ClipLoader: # create name repr = self.context["representation"] repr_cntx = repr["context"] - asset = str(repr_cntx["asset"]) - product_name = str(repr_cntx["subset"]) - representation = str(repr_cntx["representation"]) + folder_path = self.context["folder"]["path"] + product_name = self.context["product"]["name"] + representation = repr["name"] self.data["clip_name"] = self.clip_name_template.format(**repr_cntx) self.data["track_name"] = "_".join([product_name, representation]) - self.data["versionData"] = self.context["version"]["data"] + self.data["versionAttributes"] = self.context["version"]["attrib"] # gets file path file = get_representation_path_from_context(self.context) if not file: - repr_id = 
repr["_id"] + repr_id = repr["id"] log.warning( "Representation id `{}` is failing to load".format(repr_id)) return None @@ -467,11 +468,7 @@ class ClipLoader: self._fix_path_hashes() # solve project bin structure path - hierarchy = str("/".join(( - "Loader", - repr_cntx["hierarchy"].replace("\\", "/"), - asset - ))) + hierarchy = "Loader{}".format(folder_path) self.data["binPath"] = hierarchy @@ -487,16 +484,6 @@ class ClipLoader: file = file.replace(frame, "#" * padding) self.data["path"] = file - def _get_asset_data(self): - """ Get all available asset data - - joint `data` key with asset.data dict into the representation - - """ - - asset_doc = self.context["asset"] - self.data["assetData"] = asset_doc["data"] - def _make_track_item(self, source_bin_item, audio=False): """ Create track item with """ @@ -530,12 +517,13 @@ class ClipLoader: self.media_duration = int(self.media.duration()) # get handles - self.handle_start = self.data["versionData"].get("handleStart") - self.handle_end = self.data["versionData"].get("handleEnd") + version_attributes = self.data["versionAttributes"] + self.handle_start = version_attributes.get("handleStart") + self.handle_end = version_attributes.get("handleEnd") if self.handle_start is None: - self.handle_start = self.data["assetData"]["handleStart"] + self.handle_start = self.data["folderAttributes"]["handleStart"] if self.handle_end is None: - self.handle_end = self.data["assetData"]["handleEnd"] + self.handle_end = self.data["folderAttributes"]["handleEnd"] self.handle_start = int(self.handle_start) self.handle_end = int(self.handle_end) @@ -552,11 +540,11 @@ class ClipLoader: last_timeline_out = int(last_track_item.timelineOut()) + 1 self.timeline_in = last_timeline_out self.timeline_out = last_timeline_out + int( - self.data["assetData"]["clipOut"] - - self.data["assetData"]["clipIn"]) + self.data["folderAttributes"]["clipOut"] + - self.data["folderAttributes"]["clipIn"]) else: - self.timeline_in = int(self.data["assetData"]["clipIn"]) - self.timeline_out = int(self.data["assetData"]["clipOut"]) + self.timeline_in = int(self.data["folderAttributes"]["clipIn"]) + self.timeline_out = int(self.data["folderAttributes"]["clipOut"]) log.debug("__ self.timeline_in: {}".format(self.timeline_in)) log.debug("__ self.timeline_out: {}".format(self.timeline_out)) @@ -917,16 +905,16 @@ class PublishClip: "hierarchyData": hierarchy_formatting_data, "productName": self.product_name, "productType": self.product_type, - "families": [self.product_type, self.data["family"]] + "families": [self.product_type, self.data["productType"]] } - def _convert_to_entity(self, type, template): + def _convert_to_entity(self, src_type, template): """ Converting input key to key with type. 
""" # convert to entity type - entity_type = self.types.get(type, None) + folder_type = self.types.get(src_type, None) - assert entity_type, "Missing entity type for `{}`".format( - type + assert folder_type, "Missing folder type for `{}`".format( + src_type ) # first collect formatting data to use for formatting template @@ -937,7 +925,7 @@ class PublishClip: formatting_data[_k] = value return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": template.format( **formatting_data ) diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py b/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py index 9c919e7cb4..6a8057ec1e 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/Startup/SpreadsheetExport.py @@ -3,9 +3,11 @@ # Note: This only prints the text data that is visible in the active Spreadsheet View. # If you've filtered text, only the visible text will be printed to the CSV file # Usage: Copy to ~/.hiero/Python/StartupUI +import os +import csv + import hiero.core.events import hiero.ui -import os, csv try: from PySide.QtGui import * from PySide.QtCore import * diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py index b8dfb07b47..fcfa24310e 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/PimpMySpreadsheet.py @@ -641,7 +641,7 @@ def _setStatus(self, status): global gStatusTags # Get a valid Tag object from the Global list of statuses - if not status in gStatusTags.keys(): + if status not in gStatusTags.keys(): print("Status requested was not a valid Status string.") return diff --git a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py index 17c044f3ec..d2fe608d99 100644 --- a/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py +++ b/client/ayon_core/hosts/hiero/api/startup/Python/StartupUI/otioimporter/OTIOImport.py @@ -90,7 +90,7 @@ def apply_transition(otio_track, otio_item, track): kind = "Audio" try: - # Gather TrackItems involved in trasition + # Gather TrackItems involved in transition item_in, item_out = get_neighboring_trackitems( otio_item, otio_track, @@ -101,14 +101,14 @@ def apply_transition(otio_track, otio_item, track): if transition_type == "dissolve": transition_func = getattr( hiero.core.Transition, - 'create{kind}DissolveTransition'.format(kind=kind) + "create{kind}DissolveTransition".format(kind=kind) ) transition = transition_func( item_in, item_out, otio_item.in_offset.value, - otio_item.out_offset.value + otio_item.out_offset.value, ) elif transition_type == "fade_in": @@ -116,20 +116,14 @@ def apply_transition(otio_track, otio_item, track): hiero.core.Transition, 'create{kind}FadeInTransition'.format(kind=kind) ) - transition = transition_func( - item_out, - otio_item.out_offset.value - ) + transition = transition_func(item_out, otio_item.out_offset.value) elif transition_type == "fade_out": transition_func = getattr( hiero.core.Transition, - 'create{kind}FadeOutTransition'.format(kind=kind) - ) - transition = transition_func( - item_in, - otio_item.in_offset.value + "create{kind}FadeOutTransition".format(kind=kind) ) + 
transition = transition_func(item_in, otio_item.in_offset.value) else: # Unknown transition @@ -138,11 +132,10 @@ def apply_transition(otio_track, otio_item, track): # Apply transition to track track.addTransition(transition) - except Exception, e: + except Exception as e: sys.stderr.write( 'Unable to apply transition "{t}": "{e}"\n'.format( - t=otio_item, - e=e + t=otio_item, e=e ) ) @@ -153,18 +146,14 @@ def prep_url(url_in): if url.startswith("file://localhost/"): return url.replace("file://localhost/", "") - url = '{url}'.format( - sep=url.startswith(os.sep) and "" or os.sep, - url=url.startswith(os.sep) and url[1:] or url - ) + if url.startswith(os.sep): + url = url[1:] return url def create_offline_mediasource(otio_clip, path=None): - hiero_rate = hiero.core.TimeBase( - otio_clip.source_range.start_time.rate - ) + hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate) if isinstance(otio_clip.media_reference, otio.schema.ExternalReference): source_range = otio_clip.available_range() @@ -180,7 +169,7 @@ def create_offline_mediasource(otio_clip, path=None): source_range.start_time.value, source_range.duration.value, hiero_rate, - source_range.start_time.value + source_range.start_time.value, ) return media @@ -203,7 +192,7 @@ marker_color_map = { "MAGENTA": "Magenta", "BLACK": "Blue", "WHITE": "Green", - "MINT": "Cyan" + "MINT": "Cyan", } @@ -254,12 +243,6 @@ def add_markers(otio_item, hiero_item, tagsbin): if _tag is None: _tag = hiero.core.Tag(marker_color_map[marker.color]) - start = marker.marked_range.start_time.value - end = ( - marker.marked_range.start_time.value + - marker.marked_range.duration.value - ) - tag = hiero_item.addTag(_tag) tag.setName(marker.name or marker_color_map[marker_color]) @@ -275,12 +258,12 @@ def create_track(otio_track, tracknum, track_kind): # Create a Track if otio_track.kind == otio.schema.TrackKind.Video: track = hiero.core.VideoTrack( - otio_track.name or 'Video{n}'.format(n=tracknum) + otio_track.name or "Video{n}".format(n=tracknum) ) else: track = hiero.core.AudioTrack( - otio_track.name or 'Audio{n}'.format(n=tracknum) + otio_track.name or "Audio{n}".format(n=tracknum) ) return track @@ -315,34 +298,25 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin): for effect in otio_clip.effects: if isinstance(effect, otio.schema.LinearTimeWarp): trackitem.setPlaybackSpeed( - trackitem.playbackSpeed() * - effect.time_scalar + trackitem.playbackSpeed() * effect.time_scalar ) # If reverse playback speed swap source in and out if trackitem.playbackSpeed() < 0: source_out = source_range.start_time.value source_in = ( - source_range.start_time.value + - source_range.duration.value + source_range.start_time.value + source_range.duration.value ) - 1 timeline_in = playhead + source_out - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 else: # Normal playback speed source_in = source_range.start_time.value source_out = ( - source_range.start_time.value + - source_range.duration.value + source_range.start_time.value + source_range.duration.value ) - 1 timeline_in = playhead - timeline_out = ( - timeline_in + - source_range.duration.value - ) - 1 + timeline_out = (timeline_in + source_range.duration.value) - 1 # Set source and timeline in/out points trackitem.setSourceIn(source_in) @@ -357,7 +331,8 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin): def build_sequence( - otio_timeline, project=None, sequence=None, track_kind=None): 
+ otio_timeline, project=None, sequence=None, track_kind=None +): if project is None: if sequence: @@ -414,8 +389,7 @@ def build_sequence( if isinstance(otio_clip, otio.schema.Stack): bar = hiero.ui.mainWindow().statusBar() bar.showMessage( - "Nested sequences are created separately.", - timeout=3000 + "Nested sequences are created separately.", timeout=3000 ) build_sequence(otio_clip, project, otio_track.kind) @@ -428,11 +402,7 @@ def build_sequence( # Create TrackItem trackitem = create_trackitem( - playhead, - track, - otio_clip, - clip, - tagsbin + playhead, track, otio_clip, clip, tagsbin ) # Add trackitem to track diff --git a/client/ayon_core/hosts/hiero/api/tags.py b/client/ayon_core/hosts/hiero/api/tags.py index 6491b1f384..5abfee75d0 100644 --- a/client/ayon_core/hosts/hiero/api/tags.py +++ b/client/ayon_core/hosts/hiero/api/tags.py @@ -1,9 +1,9 @@ import json import re -import os import hiero -from ayon_core.client import get_project, get_assets +import ayon_api + from ayon_core.lib import Logger from ayon_core.pipeline import get_current_project_name @@ -89,7 +89,7 @@ def update_tag(tag, data): # set all data metadata to tag metadata for _k, _v in data_mtd.items(): value = str(_v) - if type(_v) == dict: + if isinstance(_v, dict): value = json.dumps(_v) # set the value @@ -143,40 +143,22 @@ def add_tags_to_workfile(): # Get project task types. project_name = get_current_project_name() - project_doc = get_project(project_name) - tasks = project_doc["config"]["tasks"] + project_entity = ayon_api.get_project(project_name) + task_types = project_entity["taskType"] nks_pres_tags["[Tasks]"] = {} - log.debug("__ tasks: {}".format(tasks)) - for task_type in tasks.keys(): - nks_pres_tags["[Tasks]"][task_type.lower()] = { + log.debug("__ tasks: {}".format(task_types)) + for task_type in task_types: + task_type_name = task_type["name"] + nks_pres_tags["[Tasks]"][task_type_name.lower()] = { "editable": "1", - "note": task_type, + "note": task_type_name, "icon": "icons:TagGood.png", "metadata": { "productType": "task", - "type": task_type + "type": task_type_name } } - # Get project assets. Currently Ftrack specific to differentiate between - # asset builds and shots. - if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) == 1: - nks_pres_tags["[AssetBuilds]"] = {} - for asset in get_assets( - project_name, fields=["name", "data.entityType"] - ): - if asset["data"]["entityType"] == "AssetBuild": - nks_pres_tags["[AssetBuilds]"][asset["name"]] = { - "editable": "1", - "note": "", - "icon": { - "path": "icons:TagActor.png" - }, - "metadata": { - "productType": "assetbuild" - } - } - # loop through tag data dict and create deep tag structure for _k, _val in nks_pres_tags.items(): # check if key is not decorated with [] so it is defined as bin diff --git a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py index 90ea9ef50f..1fc808fdd1 100644 --- a/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/create/create_shot_clip.py @@ -16,7 +16,7 @@ class CreateShotClip(phiero.Creator): gui_tracks = [track.name() for track in phiero.get_current_sequence().videoTracks()] - gui_name = "Pype publish attributes creator" + gui_name = "AYON publish attributes creator" gui_info = "Define sequential rename and fill hierarchy data." 
gui_inputs = { "renameHierarchy": { @@ -137,7 +137,7 @@ class CreateShotClip(phiero.Creator): "value": ["", "main", "bg", "fg", "bg", "animatic"], "type": "QComboBox", - "label": "pRODUCT Name", + "label": "Product Name", "target": "ui", "toolTip": "chose product name pattern, if is selected, name of track layer will be used", # noqa "order": 0}, @@ -159,14 +159,14 @@ class CreateShotClip(phiero.Creator): "type": "QCheckBox", "label": "Include audio", "target": "tag", - "toolTip": "Process productS with corresponding audio", # noqa + "toolTip": "Process products with corresponding audio", # noqa "order": 3}, "sourceResolution": { "value": False, "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, @@ -211,7 +211,7 @@ class CreateShotClip(phiero.Creator): presets = deepcopy(self.presets) gui_inputs = deepcopy(self.gui_inputs) - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py index 686edaa01f..715e8c508e 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_clip.py @@ -1,11 +1,6 @@ -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id -) -from ayon_core.pipeline import ( - get_representation_path, - get_current_project_name, -) +import ayon_api + +from ayon_core.pipeline import get_representation_path from ayon_core.lib.transcoding import ( VIDEO_EXTENSIONS, IMAGE_EXTENSIONS @@ -20,8 +15,8 @@ class LoadClip(phiero.SequenceLoader): during conforming to project """ - families = ["render2d", "source", "plate", "render", "review"] - representations = ["*"] + product_types = {"render2d", "source", "plate", "render", "review"} + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -64,13 +59,7 @@ class LoadClip(phiero.SequenceLoader): if option == "representations": continue - if option == "product_types": - # TODO remove the key conversion when loaders can filter by - # product types - # convert 'product_types' to 'families' - option = "families" - - elif option == "clip_name_template": + if option == "clip_name_template": # TODO remove the formatting replacement value = ( value @@ -101,10 +90,10 @@ class LoadClip(phiero.SequenceLoader): path = self.filepath_from_context(context) track_item = phiero.ClipLoader(self, context, path, **options).load() namespace = namespace or track_item.name() - version = context['version'] - version_data = version.get("data", {}) - version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + version_entity = context["version"] + version_attributes = version_entity["attrib"] + version_name = version_entity["version"] + colorspace = version_attributes.get("colorSpace") object_name = self.clip_name_template.format( **context["representation"]["context"]) @@ -119,11 +108,11 @@ class LoadClip(phiero.SequenceLoader): ] # move all version data keys to tag data - data_imprint = {} - for key in add_keys: - data_imprint.update({ - key: version_data.get(key, str(None)) - }) + data_imprint = { + key: 
version_attributes.get(key, str(None)) + for key in add_keys + + } # add variables related to version context data_imprint.update({ @@ -133,7 +122,9 @@ class LoadClip(phiero.SequenceLoader): }) # update color of clip regarding the version order - self.set_item_color(track_item, version) + self.set_item_color( + context["project"]["name"], track_item, version_entity + ) # deal with multiselection self.multiselection(track_item) @@ -146,27 +137,27 @@ class LoadClip(phiero.SequenceLoader): self.__class__.__name__, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Updating previously loaded clips """ + version_entity = context["version"] + repre_entity = context["representation"] # load clip to timeline and get main variables - name = container['name'] - namespace = container['namespace'] + name = container["name"] + namespace = container["namespace"] track_item = phiero.get_track_items( track_item_name=namespace).pop() - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - - version_data = version_doc.get("data", {}) - version_name = version_doc.get("name", None) - colorspace = version_data.get("colorspace", None) + version_attributes = version_entity["attrib"] + version_name = version_entity["version"] + colorspace = version_attributes.get("colorSpace") object_name = "{}_{}".format(name, namespace) - file = get_representation_path(representation).replace("\\", "/") + + file = get_representation_path(repre_entity).replace("\\", "/") clip = track_item.source() # reconnect media to new path @@ -176,29 +167,35 @@ class LoadClip(phiero.SequenceLoader): if colorspace: clip.setSourceMediaColourTransform(colorspace) - # add additional metadata from the version to imprint Avalon knob - add_keys = [ - "frameStart", "frameEnd", "source", "author", - "fps", "handleStart", "handleEnd" - ] + # add additional metadata from the version to imprint metadata knob # move all version data keys to tag data data_imprint = {} - for key in add_keys: + for key in [ + "frameStart", + "frameEnd", + "source", + "author", + "fps", + "handleStart", + "handleEnd", + ]: data_imprint.update({ - key: version_data.get(key, str(None)) + key: version_attributes.get(key, str(None)) }) # add variables related to version context data_imprint.update({ - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "version": version_name, "colorspace": colorspace, "objectName": object_name }) # update color of clip regarding the version order - self.set_item_color(track_item, version_doc) + self.set_item_color( + context["project"]["name"], track_item, version_entity + ) return phiero.update_container(track_item, data_imprint) @@ -221,14 +218,13 @@ class LoadClip(phiero.SequenceLoader): cls.sequence = cls.track.parent() @classmethod - def set_item_color(cls, track_item, version_doc): - project_name = get_current_project_name() - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + def set_item_color(cls, project_name, track_item, version_entity): + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) clip = track_item.source() # set clip colour - if version_doc["_id"] == last_version_doc["_id"]: + if 
version_entity["id"] == last_version_entity["id"]: clip.binItem().setColor(cls.clip_color_last) else: clip.binItem().setColor(cls.clip_color) diff --git a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py index 809080e87e..92aa2de325 100644 --- a/client/ayon_core/hosts/hiero/plugins/load/load_effects.py +++ b/client/ayon_core/hosts/hiero/plugins/load/load_effects.py @@ -2,15 +2,10 @@ import json from collections import OrderedDict import six -from ayon_core.client import ( - get_version_by_id -) - from ayon_core.pipeline import ( AVALON_CONTAINER_ID, load, get_representation_path, - get_current_project_name ) from ayon_core.hosts.hiero import api as phiero from ayon_core.lib import Logger @@ -19,8 +14,8 @@ from ayon_core.lib import Logger class LoadEffects(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" - families = ["effect"] - representations = ["*"] + product_types = {"effect"} + representations = {"*"} extension = {"json"} label = "Load Effects" @@ -37,7 +32,7 @@ class LoadEffects(load.LoaderPlugin): Arguments: context (dict): context of version name (str): name of the version - namespace (str): asset name + namespace (str): Folder name. data (dict): compulsory attribute > not used Returns: @@ -48,10 +43,10 @@ class LoadEffects(load.LoaderPlugin): active_sequence, "Loaded_{}".format(name)) # get main variables - namespace = namespace or context["asset"]["name"] + namespace = namespace or context["folder"]["name"] object_name = "{}_{}".format(name, namespace) - clip_in = context["asset"]["data"]["clipIn"] - clip_out = context["asset"]["data"]["clipOut"] + clip_in = context["folder"]["attrib"]["clipIn"] + clip_out = context["folder"]["attrib"]["clipOut"] data_imprint = { "objectName": object_name, @@ -157,22 +152,22 @@ class LoadEffects(load.LoaderPlugin): return loaded - def update(self, container, representation): + def update(self, container, context): """ Updating previously loaded effects """ + version_entity = context["version"] + repre_entity = context["representation"] active_track = container["_item"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_entity).replace("\\", "/") # get main variables name = container['name'] namespace = container['namespace'] # get timeline in out data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - version_data = version_doc["data"] - clip_in = version_data["clipIn"] - clip_out = version_data["clipOut"] + version_attributes = version_entity["attrib"] + clip_in = version_attributes["clipIn"] + clip_out = version_attributes["clipOut"] object_name = "{}_{}".format(name, namespace) @@ -197,7 +192,7 @@ class LoadEffects(load.LoaderPlugin): data_imprint = { "objectName": object_name, "name": name, - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "children_names": [] } @@ -256,8 +251,8 @@ class LoadEffects(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): pass @@ -298,7 +293,7 @@ class LoadEffects(load.LoaderPlugin): "name": str(name), "namespace": str(namespace), "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } } diff --git 
a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py
index 32b4864022..bfc63f2551 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_clip_effects.py
@@ -1,5 +1,5 @@
-from itertools import product
 import re
+
 import pyblish.api
diff --git a/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py
index d73b5d4667..0e5d849b78 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish/collect_frame_tag_instances.py
@@ -5,8 +5,6 @@ import json

 import pyblish.api

-from ayon_core.client import get_asset_name_identifier
-

 class CollectFrameTagInstances(pyblish.api.ContextPlugin):
     """Collect frames from tags.
@@ -104,8 +102,7 @@ class CollectFrameTagInstances(pyblish.api.ContextPlugin):
         # first collect all available product tag frames
         product_data = {}

-        context_asset_doc = context.data["assetEntity"]
-        context_folder_path = get_asset_name_identifier(context_asset_doc)
+        context_folder_path = context.data["folderEntity"]["path"]

         for tag_data in sequence_tags:
             frame = int(tag_data["start"])
diff --git a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py
index bcaf5308d9..3599a830d2 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish/extract_thumbnail.py
@@ -4,12 +4,12 @@ import pyblish.api

 from ayon_core.pipeline import publish


-class ExtractThumnail(publish.Extractor):
+class ExtractThumbnail(publish.Extractor):
     """
-    Extractor for track item's tumnails
+    Extractor for track item's thumbnails
     """

-    label = "Extract Thumnail"
+    label = "Extract Thumbnail"
     order = pyblish.api.ExtractorOrder
     families = ["plate", "take"]
     hosts = ["hiero"]
@@ -48,7 +48,7 @@
         self.log.debug(
             "__ thumb_path: `{}`, frame: `{}`".format(thumbnail, thumb_frame))

-        self.log.info("Thumnail was generated to: {}".format(thumb_path))
+        self.log.info("Thumbnail was generated to: {}".format(thumb_path))

         thumb_representation = {
             'files': thumb_file,
             'stagingDir': staging_dir,
diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py
index 911b96c280..b7a508f0b5 100644
--- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py
+++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_instances.py
@@ -43,7 +43,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         tracks_effect_items = self.collect_sub_track_items(all_tracks)
         context.data["tracksEffectItems"] = tracks_effect_items

-        # process all sellected timeline track items
+        # process all selected timeline track items
         for track_item in selected_timeline_items:
             data = {}
             clip_name = track_item.name()
@@ -62,7 +62,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
             }:
                 continue

-            # get clips subtracks and anotations
+            # get clips subtracks and annotations
            annotations = self.clip_annotations(source_clip)
            subtracks = self.clip_subtrack(track_item)
            self.log.debug("Annotations: {}".format(annotations))
@@ -84,21 +84,16 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 k: v for k, v in tag_data.items()
                 if k not
in ("id", "applieswhole", "label") }) + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in data: + for parent in data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") - asset, asset_name = self._get_asset_data(tag_data) - - product_name = tag_data.get("productName") - if product_name is None: - product_name = tag_data["subset"] + folder_path, folder_name = self._get_folder_data(tag_data) families = [str(f) for f in tag_data["families"]] - # form label - label = "{} -".format(asset) - if asset_name != clip_name: - label += " ({})".format(clip_name) - label += " {}".format(product_name) - # TODO: remove backward compatibility product_name = tag_data.get("productName") if product_name is None: @@ -108,7 +103,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_name should not be missing if not product_name: self.log.error( - "Product name is not defined for: {}".format(asset)) + "Product name is not defined for: {}".format(folder_path)) # TODO: remove backward compatibility product_type = tag_data.get("productType") @@ -119,15 +114,21 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # backward compatibility: product_type should not be missing if not product_type: self.log.error( - "Product type is not defined for: {}".format(asset)) + "Product type is not defined for: {}".format(folder_path)) + + # form label + label = "{} -".format(folder_path) + if folder_name != clip_name: + label += " ({})".format(clip_name) + label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, - "folderPath": asset, - "asset_name": asset_name, "productName": product_name, "productType": product_type, + "folderPath": folder_path, + "asset_name": folder_name, "item": track_item, "families": families, "publish": tag_data["publish"], @@ -217,19 +218,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not hierarchy_data: return - asset = data["folderPath"] - asset_name = data["asset_name"] + folder_path = data["folderPath"] + folder_name = data["asset_name"] product_type = "shot" # form label - label = "{} -".format(asset) - if asset_name != clip_name: + label = "{} -".format(folder_path) + if folder_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, @@ -242,13 +243,13 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.log.debug( "_ instance.data: {}".format(pformat(instance.data))) - def _get_asset_data(self, data): + def _get_folder_data(self, data): folder_path = data.pop("folderPath", None) if data.get("asset_name"): - asset_name = data["asset_name"] + folder_name = data["asset_name"] else: - asset_name = data["asset"] + folder_name = data["asset"] # backward compatibility for clip tags # which are missing folderPath key @@ -257,10 +258,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): hierarchy_path = data["hierarchy"] folder_path = "/{}/{}".format( hierarchy_path, - asset_name + folder_name ) - return folder_path, asset_name + return folder_path, folder_name def create_audio_instance(self, context, **data): product_name = "audioMain" @@ -276,19 +277,19 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if not 
self.test_any_audio(item): return - asset = data["folderPath"] + folder_path = data["folderPath"] asset_name = data["asset_name"] product_type = "audio" # form label - label = "{} -".format(asset) + label = "{} -".format(folder_path) if asset_name != clip_name: label += " ({}) ".format(clip_name) label += " {}".format(product_name) data.update({ - "name": "{}_{}".format(asset, product_name), + "name": "{}_{}".format(folder_path, product_name), "label": label, "productName": product_name, "productType": product_type, @@ -378,12 +379,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): # collect all subtrack items sub_track_items = {} for track in tracks: - items = track.items() - - effet_items = track.subTrackItems() + effect_items = track.subTrackItems() # skip if no clips on track > need track with effect only - if not effet_items: + if not effect_items: continue # skip all disabled tracks @@ -391,7 +390,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): continue track_index = track.trackIndex() - _sub_track_items = phiero.flatten(effet_items) + _sub_track_items = phiero.flatten(effect_items) _sub_track_items = list(_sub_track_items) # continue only if any subtrack items are collected @@ -439,10 +438,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin): for item in subTrackItems: if "TimeWarp" in item.name(): continue - # avoid all anotation + # avoid all annotation if isinstance(item, hiero.core.Annotation): continue - # # avoid all not anaibled + # avoid all disabled if not item.isEnabled(): continue subtracks.append(item) diff --git a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py index 8df6cd4261..0b6b34ea6c 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/hiero/plugins/publish/precollect_workfile.py @@ -17,8 +17,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.491 def process(self, context): - asset = context.data["folderPath"] - asset_name = asset.split("/")[-1] + folder_path = context.data["folderPath"] + folder_name = folder_path.split("/")[-1] active_timeline = hiero.ui.activeSequence() project = active_timeline.project() @@ -62,12 +62,12 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): product_type = "workfile" instance_data = { "label": "{} - {}Main".format( - asset, product_type), - "name": "{}_{}".format(asset_name, product_type), - "folderPath": context.data["folderPath"], + folder_path, product_type), + "name": "{}_{}".format(folder_name, product_type), + "folderPath": folder_path, # TODO use 'get_product_name' "productName": "{}{}Main".format( - asset_name, product_type.capitalize() + folder_name, product_type.capitalize() ), "item": project, "productType": product_type, diff --git a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py deleted file mode 100644 index 96d471115a..0000000000 --- a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py +++ /dev/null @@ -1,64 +0,0 @@ -from pyblish import api - -from ayon_core.client import get_assets, get_asset_name_identifier - - -class CollectAssetBuilds(api.ContextPlugin): - """Collect asset from tags. 
- - Tag is expected to have name of the asset and metadata: - { - "productType": "assetbuild" - } - """ - - # Run just after CollectClip - order = api.CollectorOrder + 0.02 - label = "Collect AssetBuilds" - hosts = ["hiero"] - - def process(self, context): - project_name = context.data["projectName"] - asset_builds = {} - for asset_doc in get_assets(project_name): - if asset_doc["data"].get("entityType") != "AssetBuild": - continue - - asset_name = get_asset_name_identifier(asset_doc) - self.log.debug("Found \"{}\" in database.".format(asset_doc)) - asset_builds[asset_name] = asset_doc - - for instance in context: - if instance.data["productType"] != "clip": - continue - - # Exclude non-tagged instances. - tagged = False - asset_names = [] - - for tag in instance.data["tags"]: - t_metadata = dict(tag.metadata()) - t_product_type = t_metadata.get("tag.productType") - if t_product_type is None: - t_product_type = t_metadata.get("tag.family", "") - - if t_product_type.lower() == "assetbuild": - asset_names.append(tag["name"]) - tagged = True - - if not tagged: - self.log.debug( - "Skipping \"{}\" because its not tagged with " - "\"assetbuild\"".format(instance) - ) - continue - - # Collect asset builds. - data = {"assetbuilds": []} - for name in asset_names: - data["assetbuilds"].append(asset_builds[name]) - self.log.debug( - "Found asset builds: {}".format(data["assetbuilds"]) - ) - - instance.data.update(data) diff --git a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py index 297ffa8001..8503a0b6a7 100644 --- a/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py +++ b/client/ayon_core/hosts/hiero/plugins/publish_old_workflow/precollect_retime.py @@ -35,10 +35,6 @@ class PrecollectRetime(api.InstancePlugin): source_out = int(track_item.sourceOut()) speed = track_item.playbackSpeed() - # calculate available material before retime - available_in = int(track_item.handleInLength() * speed) - available_out = int(track_item.handleOutLength() * speed) - self.log.debug(( "_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`, \n " "source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n " diff --git a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py index 57fdef753a..72c157f187 100644 --- a/client/ayon_core/hosts/houdini/api/creator_node_shelves.py +++ b/client/ayon_core/hosts/houdini/api/creator_node_shelves.py @@ -12,7 +12,8 @@ import tempfile import logging import os -from ayon_core.client import get_asset_by_name +import ayon_api + from ayon_core.pipeline import registered_host from ayon_core.pipeline.create import CreateContext from ayon_core.resources import get_ayon_icon_filepath @@ -90,13 +91,19 @@ def create_interactive(creator_identifier, **kwargs): pane = stateutils.activePane(kwargs) if isinstance(pane, hou.NetworkEditor): pwd = pane.pwd() + project_name = context.get_current_project_name() + folder_path = context.get_current_folder_path() + task_name = context.get_current_task_name() + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = creator.get_product_name( project_name=context.get_current_project_name(), - asset_doc=get_asset_by_name( - project_name=context.get_current_project_name(), - asset_name=context.get_current_asset_name() - ), - 
task_name=context.get_current_task_name(), + folder_entity=folder_entity, + task_entity=task_entity, variant=variant, host_name=context.host_name, ) diff --git a/client/ayon_core/hosts/houdini/api/lib.py b/client/ayon_core/hosts/houdini/api/lib.py index 9db055779d..7ca8f7f8f0 100644 --- a/client/ayon_core/hosts/houdini/api/lib.py +++ b/client/ayon_core/hosts/houdini/api/lib.py @@ -3,27 +3,26 @@ import sys import os import errno import re -import uuid import logging import json from contextlib import contextmanager import six +import ayon_api from ayon_core.lib import StringTemplate -from ayon_core.client import get_project, get_asset_by_name from ayon_core.settings import get_current_project_settings from ayon_core.pipeline import ( Anatomy, get_current_project_name, - get_current_asset_name, + get_current_folder_path, registered_host, get_current_context, get_current_host_name, ) from ayon_core.pipeline.create import CreateContext from ayon_core.pipeline.template_data import get_template_data -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.tools.utils import PopupUpdateKeys, SimplePopup from ayon_core.tools.utils.host_tools import get_tool_by_name @@ -36,89 +35,12 @@ log = logging.getLogger(__name__) JSON_PREFIX = "JSON:::" -def get_asset_fps(asset_doc=None): - """Return current asset fps.""" +def get_folder_fps(folder_entity=None): + """Return current folder fps.""" - if asset_doc is None: - asset_doc = get_current_project_asset(fields=["data.fps"]) - return asset_doc["data"]["fps"] - - -def set_id(node, unique_id, overwrite=False): - exists = node.parm("id") - if not exists: - imprint(node, {"id": unique_id}) - - if not exists and overwrite: - node.setParm("id", unique_id) - - -def get_id(node): - """Get the `cbId` attribute of the given node. - - Args: - node (hou.Node): the name of the node to retrieve the attribute from - - Returns: - str: cbId attribute of the node. - - """ - - if node is not None: - return node.parm("id") - - -def generate_ids(nodes, asset_id=None): - """Returns new unique ids for the given nodes. - - Note: This does not assign the new ids, it only generates the values. - - To assign new ids using this method: - >>> nodes = ["a", "b", "c"] - >>> for node, id in generate_ids(nodes): - >>> set_id(node, id) - - To also override any existing values (and assign regenerated ids): - >>> nodes = ["a", "b", "c"] - >>> for node, id in generate_ids(nodes): - >>> set_id(node, id, overwrite=True) - - Args: - nodes (list): List of nodes. - asset_id (str or bson.ObjectId): The database id for the *asset* to - generate for. When None provided the current asset in the - active session is used. - - Returns: - list: A list of (node, id) tuples. 
- - """ - - if asset_id is None: - project_name = get_current_project_name() - asset_name = get_current_asset_name() - # Get the asset ID from the database for the asset of current context - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - - assert asset_doc, "No current asset found in Session" - asset_id = asset_doc['_id'] - - node_ids = [] - for node in nodes: - _, uid = str(uuid.uuid4()).rsplit("-", 1) - unique_id = "{}:{}".format(asset_id, uid) - node_ids.append((node, unique_id)) - - return node_ids - - -def get_id_required_nodes(): - - valid_types = ["geometry"] - nodes = {n for n in hou.node("/out").children() if - n.type().name() in valid_types} - - return list(nodes) + if folder_entity is None: + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) + return folder_entity["attrib"]["fps"] def get_output_parameter(node): @@ -199,7 +121,7 @@ def validate_fps(): """ - fps = get_asset_fps() + fps = get_folder_fps() current_fps = hou.fps() # returns float if current_fps != fps: @@ -321,7 +243,10 @@ def render_rop(ropnode): try: ropnode.render(verbose=verbose, # Allow Deadline to capture completion percentage - output_progress=verbose) + output_progress=verbose, + # Render only this node + # (do not render any of its dependencies) + ignore_inputs=True) except hou.Error as exc: # The hou.Error is not inherited from a Python Exception class, # so we explicitly capture the houdini error, otherwise pyblish @@ -525,39 +450,41 @@ def maintained_selection(): node.setSelected(on=True) -def reset_framerange(): - """Set frame range and FPS to current asset""" +def reset_framerange(fps=True, frame_range=True): + """Set frame range and FPS to current folder.""" - # Get asset data project_name = get_current_project_name() - asset_name = get_current_asset_name() - # Get the asset ID from the database for the asset of current context - asset_doc = get_asset_by_name(project_name, asset_name) - asset_data = asset_doc["data"] + folder_path = get_current_folder_path() - # Get FPS - fps = get_asset_fps(asset_doc) + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + folder_attributes = folder_entity["attrib"] - # Get Start and End Frames - frame_start = asset_data.get("frameStart") - frame_end = asset_data.get("frameEnd") + # Set FPS + if fps: + fps = get_folder_fps(folder_entity) + print("Setting scene FPS to {}".format(int(fps))) + set_scene_fps(fps) - if frame_start is None or frame_end is None: - log.warning("No edit information found for %s" % asset_name) - return + if frame_range: - handle_start = asset_data.get("handleStart", 0) - handle_end = asset_data.get("handleEnd", 0) + # Set Start and End Frames + frame_start = folder_attributes.get("frameStart") + frame_end = folder_attributes.get("frameEnd") - frame_start -= int(handle_start) - frame_end += int(handle_end) + if frame_start is None or frame_end is None: + log.warning("No edit information found for '%s'", folder_path) + return - # Set frame range and FPS - print("Setting scene FPS to {}".format(int(fps))) - set_scene_fps(fps) - hou.playbar.setFrameRange(frame_start, frame_end) - hou.playbar.setPlaybackRange(frame_start, frame_end) - hou.setFrame(frame_start) + handle_start = folder_attributes.get("handleStart", 0) + handle_end = folder_attributes.get("handleEnd", 0) + + frame_start -= int(handle_start) + frame_end += int(handle_end) + + # Set frame range and FPS + hou.playbar.setFrameRange(frame_start, frame_end) + hou.playbar.setPlaybackRange(frame_start, frame_end) + 
hou.setFrame(frame_start) def get_main_window(): @@ -641,7 +568,7 @@ def get_frame_data(node, log=None): log.info( "Node '{}' has 'Render current frame' set.\n" - "Asset Handles are ignored.\n" + "Folder Handles are ignored.\n" "frameStart and frameEnd are set to the " "current frame.".format(node.path()) ) @@ -780,31 +707,43 @@ def get_output_children(output_node, include_sops=True): return out_list -def get_resolution_from_doc(doc): - """Get resolution from the given asset document. """ +def get_resolution_from_folder(folder_entity): + """Get resolution from the given folder entity. - if not doc or "data" not in doc: - print("Entered document is not valid. \"{}\"".format(str(doc))) + Args: + folder_entity (dict[str, Any]): Folder entity. + + Returns: + Union[Tuple[int, int], None]: Resolution width and height. + + """ + if not folder_entity or "attrib" not in folder_entity: + print("Entered folder is not valid. \"{}\"".format( + str(folder_entity) + )) return None - resolution_width = doc["data"].get("resolutionWidth") - resolution_height = doc["data"].get("resolutionHeight") + folder_attributes = folder_entity["attrib"] + resolution_width = folder_attributes.get("resolutionWidth") + resolution_height = folder_attributes.get("resolutionHeight") # Make sure both width and height are set if resolution_width is None or resolution_height is None: - print("No resolution information found for \"{}\"".format(doc["name"])) + print("No resolution information found for '{}'".format( + folder_entity["path"] + )) return None return int(resolution_width), int(resolution_height) -def set_camera_resolution(camera, asset_doc=None): - """Apply resolution to camera from asset document of the publish""" +def set_camera_resolution(camera, folder_entity=None): + """Apply resolution to camera from folder entity of the publish""" - if not asset_doc: - asset_doc = get_current_project_asset() + if not folder_entity: + folder_entity = get_current_folder_entity() - resolution = get_resolution_from_doc(asset_doc) + resolution = get_resolution_from_folder(folder_entity) if resolution: print("Setting camera resolution: {} -> {}x{}".format( @@ -827,45 +766,88 @@ def get_camera_from_container(container): return cameras[0] -def get_current_context_template_data_with_asset_data(): - """ - TODOs: - Support both 'assetData' and 'folderData' in future. +def get_current_context_template_data_with_folder_attrs(): """ + Output contains 'folderAttributes' key with folder attribute values. + + Returns: + dict[str, Any]: Template data to fill templates. 
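+
+    Example:
+        A minimal sketch of filling a path template with this data. The
+        template string and the "work" root key are illustrative only,
+        not a real anatomy template::
+
+            data = get_current_context_template_data_with_folder_attrs()
+            path = StringTemplate.format_template(
+                "{root[work]}/{project[name]}/{folder[name]}", data
+            )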
+
+    """
     context = get_current_context()
     project_name = context["project_name"]
-    asset_name = context["folder_path"]
+    folder_path = context["folder_path"]
     task_name = context["task_name"]
     host_name = get_current_host_name()
 
-    anatomy = Anatomy(project_name)
-    project_doc = get_project(project_name)
-    asset_doc = get_asset_by_name(project_name, asset_name)
+    project_entity = ayon_api.get_project(project_name)
+    anatomy = Anatomy(project_name, project_entity=project_entity)
+    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
+    task_entity = ayon_api.get_task_by_name(
+        project_name, folder_entity["id"], task_name
+    )
 
     # get context specific vars
-    asset_data = asset_doc["data"]
+    folder_attributes = folder_entity["attrib"]
 
     # compute `frameStartHandle` and `frameEndHandle`
-    frame_start = asset_data.get("frameStart")
-    frame_end = asset_data.get("frameEnd")
-    handle_start = asset_data.get("handleStart")
-    handle_end = asset_data.get("handleEnd")
+    frame_start = folder_attributes.get("frameStart")
+    frame_end = folder_attributes.get("frameEnd")
+    handle_start = folder_attributes.get("handleStart")
+    handle_end = folder_attributes.get("handleEnd")
     if frame_start is not None and handle_start is not None:
-        asset_data["frameStartHandle"] = frame_start - handle_start
+        folder_attributes["frameStartHandle"] = frame_start - handle_start
 
     if frame_end is not None and handle_end is not None:
-        asset_data["frameEndHandle"] = frame_end + handle_end
+        folder_attributes["frameEndHandle"] = frame_end + handle_end
 
     template_data = get_template_data(
-        project_doc, asset_doc, task_name, host_name
+        project_entity, folder_entity, task_entity, host_name
     )
     template_data["root"] = anatomy.roots
-    template_data["assetData"] = asset_data
+    template_data["folderAttributes"] = folder_attributes
 
     return template_data
 
 
+def set_review_color_space(opengl_node, review_color_space="", log=None):
+    """Set ociocolorspace parameter for the given OpenGL node.
+
+    Set the `ociocolorspace` parameter of the given OpenGL node
+    to the given review_color_space value.
+    If review_color_space is empty, a default colorspace corresponding to
+    the display & view of the current Houdini session will be used.
+
+    Args:
+        opengl_node (hou.Node): ROP node to set its ociocolorspace parm.
+        review_color_space (str): Colorspace value for ociocolorspace parm.
+        log (logging.Logger): Logger to log to.
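+
+    Example:
+        A small usage sketch; the node path and colorspace name are
+        illustrative and depend on the scene and the active OCIO config::
+
+            opengl_node = hou.node("/out/opengl1")
+            set_review_color_space(opengl_node, "ACES - ACEScg")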
+    """
+    if log is None:
+        # This is a module-level function, so fall back to the module
+        # logger instead of the undefined 'self.log'
+        log = logging.getLogger(__name__)
+
+    # Set Color Correction parameter to OpenColorIO
+    colorcorrect_parm = opengl_node.parm("colorcorrect")
+    if colorcorrect_parm.eval() != 2:
+        colorcorrect_parm.set(2)
+        log.debug(
+            "'Color Correction' parm on '{}' has been set to"
+            " 'OpenColorIO'".format(opengl_node.path())
+        )
+
+    opengl_node.setParms(
+        {"ociocolorspace": review_color_space}
+    )
+
+    log.debug(
+        "'OCIO Colorspace' parm on '{}' has been set to "
+        "the view color space '{}'"
+        .format(opengl_node, review_color_space)
+    )
+
+
 def get_context_var_changes():
     """get context var changes."""
 
@@ -885,7 +867,7 @@
     return houdini_vars_to_update
 
     # Get Template data
-    template_data = get_current_context_template_data_with_asset_data()
+    template_data = get_current_context_template_data_with_folder_attrs()
 
     # Set Houdini Vars
     for item in houdini_vars:
@@ -917,7 +899,7 @@
 
 def update_houdini_vars_context():
-    """Update asset context variables"""
+    """Update folder context variables"""
 
     for var, (_old, new, is_directory) in get_context_var_changes().items():
         if is_directory:
@@ -936,7 +918,7 @@
 
 def update_houdini_vars_context_dialog():
-    """Show pop-up to update asset context variables"""
+    """Show pop-up to update folder context variables"""
     update_vars = get_context_var_changes()
     if not update_vars:
         # Nothing to change
@@ -952,7 +934,7 @@
     parent = hou.ui.mainQtWindow()
     dialog = SimplePopup(parent=parent)
     dialog.setModal(True)
-    dialog.setWindowTitle("Houdini scene has outdated asset variables")
+    dialog.setWindowTitle("Houdini scene has outdated folder variables")
     dialog.set_message(message)
     dialog.set_button_text("Fix")
 
@@ -1006,7 +988,7 @@
 
     Firstly, it gets the node and its dependencies.
     Then, it deactivates all other ROPs
-    And finaly, it triggers the publishing action.
+    And finally, it triggers the publishing action.
     """
 
     result, comment = hou.ui.readInput(
@@ -1054,3 +1036,160 @@
     template = node.parmTemplateGroup()
     template.insertBefore((0,), button_parm)
     node.setParmTemplateGroup(template)
+
+
+def get_scene_viewer():
+    """Return an instance of a visible viewport.
+
+    There may be many pane tabs; any visible one is considered current.
+
+    Returns:
+        Optional[hou.SceneViewer]: A scene viewer, if any.
+    """
+    panes = hou.ui.paneTabs()
+    panes = [x for x in panes if x.type() == hou.paneTabType.SceneViewer]
+    panes = sorted(panes, key=lambda x: x.isCurrentTab())
+    if panes:
+        return panes[-1]
+
+    return None
+
+
+def sceneview_snapshot(
+        sceneview,
+        filepath="$HIP/thumbnails/$HIPNAME.$F4.jpg",
+        frame_start=None,
+        frame_end=None):
+    """Take a snapshot of your scene view.
+
+    It takes a snapshot of your scene view for the given frame range,
+    so it is capable of generating a snapshot image sequence.
+    It works in different Houdini contexts, e.g. Objects, Solaris.
+
+    Example:
+        This is how the function can be used::
+
+            from ayon_core.hosts.houdini.api import lib
+            sceneview = hou.ui.paneTabOfType(hou.paneTabType.SceneViewer)
+            lib.sceneview_snapshot(sceneview)
+
+    Notes:
+        .png output will render poorly, so use .jpg.
+
+    How it works:
+        Get the current sceneviewer (may be more than one or hidden)
+        and screengrab the perspective viewport to a file in the
+        publish location to be picked up with the publish.
+
+    Credits:
+        https://www.sidefx.com/forum/topic/42808/?page=1#post-354796
+
+    Args:
+        sceneview (hou.SceneViewer): The scene view pane from which you want
+            to take a snapshot.
+        filepath (str): Thumbnail filepath. It expects the `$F4` token
+            when frame_end is bigger than frame_start, otherwise
+            each frame will overwrite its predecessor.
+        frame_start (int): The frame at which the snapshot starts.
+        frame_end (int): The frame at which the snapshot ends.
+    """
+
+    if frame_start is None:
+        frame_start = hou.frame()
+    if frame_end is None:
+        frame_end = frame_start
+
+    if not isinstance(sceneview, hou.SceneViewer):
+        log.debug("Wrong input: {} is not of type hou.SceneViewer."
+                  .format(sceneview))
+        return
+    viewport = sceneview.curViewport()
+
+    flip_settings = sceneview.flipbookSettings().stash()
+    flip_settings.frameRange((frame_start, frame_end))
+    flip_settings.output(filepath)
+    flip_settings.outputToMPlay(False)
+    sceneview.flipbook(viewport, flip_settings)
+    log.debug("A snapshot of sceneview has been saved to: {}".format(filepath))
+
+
+def update_content_on_context_change():
+    """Update all Creator instances to the current folder and task."""
+    host = registered_host()
+    context = host.get_current_context()
+
+    folder_path = context["folder_path"]
+    task = context["task_name"]
+
+    create_context = CreateContext(host, reset=True)
+
+    for instance in create_context.instances:
+        instance_folder_path = instance.get("folderPath")
+        if instance_folder_path and instance_folder_path != folder_path:
+            instance["folderPath"] = folder_path
+        instance_task = instance.get("task")
+        if instance_task and instance_task != task:
+            instance["task"] = task
+
+    create_context.save_changes()
+
+
+def prompt_reset_context():
+    """Prompt the user what context settings to reset.
+
+    This prompt is used on saving to a different task to allow the scene to
+    get matched to the new context.
+    """
+    # TODO: Cleanup this prototyped mess of imports and odd dialog
+    from ayon_core.tools.attribute_defs.dialog import (
+        AttributeDefinitionsDialog
+    )
+    from ayon_core.style import load_stylesheet
+    from ayon_core.lib import BoolDef, UILabelDef
+
+    definitions = [
+        UILabelDef(
+            label=(
+                "You are saving your workfile into a different folder or task."
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset workfile FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset workfile start and end frame ranges", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + if options["fps"] or options["frame_range"]: + reset_framerange( + fps=options["fps"], + frame_range=options["frame_range"] + ) + + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() diff --git a/client/ayon_core/hosts/houdini/api/pipeline.py b/client/ayon_core/hosts/houdini/api/pipeline.py index cbc94a2408..4797cf36a0 100644 --- a/client/ayon_core/hosts/houdini/api/pipeline.py +++ b/client/ayon_core/hosts/houdini/api/pipeline.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Houdini integration.""" import os -import sys import logging import hou # noqa @@ -39,6 +38,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") +# Track whether the workfile tool is about to save +_about_to_save = False + class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "houdini" @@ -61,14 +63,16 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): log.info("Installing callbacks ... ") # register_event_callback("init", on_init) self._register_callbacks() + register_event_callback("workfile.save.before", before_workfile_save) register_event_callback("before.save", before_save) register_event_callback("save", on_save) register_event_callback("open", on_open) register_event_callback("new", on_new) + register_event_callback("taskChanged", on_task_changed) self._has_been_setup = True - # Set asset settings for the empty scene directly after launch of + # Set folder settings for the empty scene directly after launch of # Houdini so it initializes into the correct scene FPS, # Frame Range, etc. 
# TODO: make sure this doesn't trigger when @@ -166,7 +170,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): if not op_ctx: op_ctx = self.create_context_node() - lib.imprint(op_ctx, data) + lib.imprint(op_ctx, data, update=True) def get_context_data(self): op_ctx = hou.node(CONTEXT_CONTAINER) @@ -235,7 +239,7 @@ def containerise(name, "name": name, "namespace": namespace, "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } lib.imprint(container, data) @@ -287,6 +291,11 @@ def ls(): yield parse_container(container) +def before_workfile_save(event): + global _about_to_save + _about_to_save = True + + def before_save(): return lib.validate_fps() @@ -298,9 +307,16 @@ def on_save(): # update houdini vars lib.update_houdini_vars_context_dialog() - nodes = lib.get_id_required_nodes() - for node, new_id in lib.generate_ids(nodes): - lib.set_id(node, new_id, overwrite=False) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + + +def on_task_changed(): + global _about_to_save + if not IS_HEADLESS and _about_to_save: + # Let's prompt the user to update the context settings or not + lib.prompt_reset_context() def _show_outdated_content_popup(): @@ -338,7 +354,7 @@ def on_open(): lib.update_houdini_vars_context_dialog() # Validate FPS after update_task_from_path to - # ensure it is using correct FPS for the asset + # ensure it is using correct FPS for the folder lib.validate_fps() if any_outdated_containers(): @@ -388,7 +404,7 @@ def on_new(): def _set_context_settings(): """Apply the project settings from the project definition - Settings can be overwritten by an asset if the asset.data contains + Settings can be overwritten by a folder if the folder.attrib contains any information regarding those settings. Examples of settings: diff --git a/client/ayon_core/hosts/houdini/api/plugin.py b/client/ayon_core/hosts/houdini/api/plugin.py index 13cf3c9949..a9c8c313b9 100644 --- a/client/ayon_core/hosts/houdini/api/plugin.py +++ b/client/ayon_core/hosts/houdini/api/plugin.py @@ -19,10 +19,6 @@ from ayon_core.lib import BoolDef from .lib import imprint, read, lsattr, add_self_publish_button -class OpenPypeCreatorError(CreatorError): - pass - - class Creator(LegacyCreator): """Creator plugin to create instances in Houdini @@ -92,8 +88,8 @@ class Creator(LegacyCreator): except hou.Error as er: six.reraise( - OpenPypeCreatorError, - OpenPypeCreatorError("Creator error: {}".format(er)), + CreatorError, + CreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) @@ -145,13 +141,12 @@ class HoudiniCreatorBase(object): @staticmethod def create_instance_node( - asset_name, node_name, parent, node_type="geometry" + folder_path, node_name, parent, node_type="geometry" ): - # type: (str, str, str) -> hou.Node """Create node representing instance. Arguments: - asset_name (str): Asset name. + folder_path (str): Folder path. node_name (str): Name of the new node. parent (str): Name of the parent node. node_type (str, optional): Type of the node. 
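+
+        Example:
+            A usage sketch; the folder path and node name are
+            illustrative::
+
+                node = HoudiniCreatorBase.create_instance_node(
+                    "/assets/characters/hero", "pointcacheMain", "/out")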
@@ -186,10 +181,10 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): if node_type is None: node_type = "geometry" - asset_name = instance_data["folderPath"] + folder_path = instance_data["folderPath"] instance_node = self.create_instance_node( - asset_name, product_name, "/out", node_type) + folder_path, product_name, "/out", node_type) self.customize_node_look(instance_node) @@ -210,8 +205,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): except hou.Error as er: six.reraise( - OpenPypeCreatorError, - OpenPypeCreatorError("Creator error: {}".format(er)), + CreatorError, + CreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) def lock_parameters(self, node, parameters): diff --git a/client/ayon_core/hosts/houdini/api/shelves.py b/client/ayon_core/hosts/houdini/api/shelves.py index b0f5af839e..b178139020 100644 --- a/client/ayon_core/hosts/houdini/api/shelves.py +++ b/client/ayon_core/hosts/houdini/api/shelves.py @@ -10,7 +10,7 @@ from ayon_core.lib import StringTemplate import hou -from .lib import get_current_context_template_data_with_asset_data +from .lib import get_current_context_template_data_with_folder_attrs log = logging.getLogger("ayon_core.hosts.houdini.shelves") @@ -31,7 +31,7 @@ def generate_shelves(): return # Get Template data - template_data = get_current_context_template_data_with_asset_data() + template_data = get_current_context_template_data_with_folder_attrs() for config in shelves_configs: selected_option = config["options"] diff --git a/client/ayon_core/hosts/houdini/api/usd.py b/client/ayon_core/hosts/houdini/api/usd.py index e9c02a0307..ed33fbf590 100644 --- a/client/ayon_core/hosts/houdini/api/usd.py +++ b/client/ayon_core/hosts/houdini/api/usd.py @@ -3,12 +3,16 @@ import contextlib import logging +import ayon_api from qtpy import QtWidgets, QtCore, QtGui from ayon_core import style -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import get_current_project_name -from ayon_core.tools.utils.assets_widget import SingleSelectAssetsWidget +from ayon_core.tools.utils import ( + PlaceholderLineEdit, + RefreshButton, + SimpleFoldersWidget, +) from pxr import Sdf @@ -16,77 +20,110 @@ from pxr import Sdf log = logging.getLogger(__name__) -class SelectAssetDialog(QtWidgets.QWidget): - """Frameless assets dialog to select asset with double click. +class SelectFolderDialog(QtWidgets.QWidget): + """Frameless folders dialog to select folder with double click. Args: - parm: Parameter where selected asset name is set. + parm: Parameter where selected folder path is set. 
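+
+    Example:
+        A usage sketch mirroring how pick_folder uses the dialog; the
+        node path is illustrative::
+
+            parm = hou.node("/obj/geo1").parm("folderPath")
+            dialog = SelectFolderDialog(parm)
+            dialog.show()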
""" def __init__(self, parm): - self.setWindowTitle("Pick Asset") + self.setWindowTitle("Pick Folder") self.setWindowFlags(QtCore.Qt.FramelessWindowHint | QtCore.Qt.Popup) - assets_widget = SingleSelectAssetsWidget(self) - assets_widget.set_project_name(get_current_project_name(), False) + header_widget = QtWidgets.QWidget(self) + + filter_input = PlaceholderLineEdit(header_widget) + filter_input.setPlaceholderText("Filter folders..") + + refresh_btn = RefreshButton(self) + + header_layout = QtWidgets.QHBoxLayout(header_widget) + header_layout.setContentsMargins(0, 0, 0, 0) + header_layout.addWidget(filter_input) + header_layout.addWidget(refresh_btn) + + for widget in ( + refresh_btn, + filter_input, + ): + size_policy = widget.sizePolicy() + size_policy.setVerticalPolicy( + QtWidgets.QSizePolicy.MinimumExpanding) + widget.setSizePolicy(size_policy) + + folders_widget = SimpleFoldersWidget(self) + folders_widget.set_project_name(get_current_project_name()) layout = QtWidgets.QHBoxLayout(self) - layout.addWidget(assets_widget) + layout.addWidget(header_widget, 0) + layout.addWidget(folders_widget, 1) - assets_widget.double_clicked.connect(self._set_parameter) - self._assets_widget = assets_widget + folders_widget.double_clicked.connect(self._set_parameter) + filter_input.textChanged.connect(self._on_filter_change) + refresh_btn.clicked.connect(self._on_refresh_clicked) + + self._folders_widget = folders_widget self._parm = parm + def _on_refresh_clicked(self): + self._folders_widget.refresh() + + def _on_filter_change(self, text): + self._folders_widget.set_name_filter(text) + def _set_parameter(self): - name = self._assets_widget.get_selected_asset_name() - self._parm.set(name) + folder_path = self._folders_widget.get_selected_folder_path() + self._parm.set(folder_path) self.close() def _on_show(self): pos = QtGui.QCursor.pos() - # Select the current asset if there is any + # Select the current folder if there is any select_id = None - name = self._parm.eval() - if name: + folder_path = self._parm.eval() + if folder_path: project_name = get_current_project_name() - db_asset = get_asset_by_name(project_name, name, fields=["_id"]) - if db_asset: - select_id = db_asset["_id"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + if folder_entity: + select_id = folder_entity["id"] # Set stylesheet self.setStyleSheet(style.load_stylesheet()) - # Refresh assets (is threaded) - self._assets_widget.refresh() - # Select asset - must be done after refresh + # Refresh folders (is threaded) + self._folders_widget.refresh() + # Select folder - must be done after refresh if select_id is not None: - self._assets_widget.select_asset(select_id) + self._folders_widget.set_selected_folder(select_id) # Show cursor (top right of window) near cursor self.resize(250, 400) self.move(self.mapFromGlobal(pos) - QtCore.QPoint(self.width(), 0)) def showEvent(self, event): - super(SelectAssetDialog, self).showEvent(event) + super(SelectFolderDialog, self).showEvent(event) self._on_show() -def pick_asset(node): - """Show a user interface to select an Asset in the project +def pick_folder(node): + """Show a user interface to select an Folder in the project - When double clicking an asset it will set the Asset value in the - 'asset' parameter. + When double clicking an folder it will set the Folder value in the + 'folderPath' parameter. 
""" - parm = node.parm("asset_name") + parm = node.parm("folderPath") if not parm: - log.error("Node has no 'asset' parameter: %s", node) + log.error("Node has no 'folderPath' parameter: %s", node) return # Construct a frameless popup so it automatically # closes when clicked outside of it. global tool - tool = SelectAssetDialog(parm) + tool = SelectFolderDialog(parm) tool.show() diff --git a/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py new file mode 100644 index 0000000000..2e97c06bff --- /dev/null +++ b/client/ayon_core/hosts/houdini/hooks/set_default_display_and_view.py @@ -0,0 +1,58 @@ +from ayon_applications import PreLaunchHook, LaunchTypes + + +class SetDefaultDisplayView(PreLaunchHook): + """Set default view and default display for houdini via OpenColorIO. + + Houdini's defaultDisplay and defaultView are set by + setting 'OCIO_ACTIVE_DISPLAYS' and 'OCIO_ACTIVE_VIEWS' + environment variables respectively. + + More info: https://www.sidefx.com/docs/houdini/io/ocio.html#set-up + """ + + app_groups = {"houdini"} + launch_types = {LaunchTypes.local} + + def execute(self): + + OCIO = self.launch_context.env.get("OCIO") + + # This is a cheap way to skip this hook if either global color + # management or houdini color management was disabled because the + # OCIO var would be set by the global OCIOEnvHook + if not OCIO: + return + + houdini_color_settings = \ + self.data["project_settings"]["houdini"]["imageio"]["workfile"] + + if not houdini_color_settings["enabled"]: + self.log.info( + "Houdini workfile color management is disabled." + ) + return + + # 'OCIO_ACTIVE_DISPLAYS', 'OCIO_ACTIVE_VIEWS' are checked + # as Admins can add them in Ayon env vars or Ayon tools. + + default_display = houdini_color_settings["default_display"] + if default_display: + # get 'OCIO_ACTIVE_DISPLAYS' value if exists. + self._set_context_env("OCIO_ACTIVE_DISPLAYS", default_display) + + default_view = houdini_color_settings["default_view"] + if default_view: + # get 'OCIO_ACTIVE_VIEWS' value if exists. 
+ self._set_context_env("OCIO_ACTIVE_VIEWS", default_view) + + def _set_context_env(self, env_var, default_value): + env_value = self.launch_context.env.get(env_var, "") + new_value = ":".join( + key for key in [default_value, env_value] if key + ) + self.log.info( + "Setting {} environment to: {}" + .format(env_var, new_value) + ) + self.launch_context.env[env_var] = new_value diff --git a/client/ayon_core/hosts/houdini/hooks/set_paths.py b/client/ayon_core/hosts/houdini/hooks/set_paths.py index 7eb346cc74..4b89ebe944 100644 --- a/client/ayon_core/hosts/houdini/hooks/set_paths.py +++ b/client/ayon_core/hosts/houdini/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py b/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py index 008187d9c8..1a4761172a 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/houdini/plugins/create/convert_legacy.py @@ -1,10 +1,10 @@ # -*- coding: utf-8 -*- """Converter for legacy Houdini products.""" -from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin +from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin from ayon_core.hosts.houdini.api.lib import imprint -class HoudiniLegacyConvertor(SubsetConvertorPlugin): +class HoudiniLegacyConvertor(ProductConvertorPlugin): """Find and convert any legacy products in the scene. This Converter will find all legacy products in the scene and will diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py index b61b4cbd46..0ab5e2794e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera products.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -23,7 +23,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance = super(CreateAlembicCamera, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) parms = { diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py index 6d992f136a..be5604c01c 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_ass.py @@ -29,7 +29,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py index b7c5910a4f..f65b54a452 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_arnold_rop.py @@ -31,7 +31,7 @@ class CreateArnoldRop(plugin.HoudiniCreator): instance = 
super(CreateArnoldRop, self).create( product_name, instance_data, - pre_create_data) # type: plugin.CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py index 92c89c71cb..3749598b1d 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_bgeo.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating pointcache bgeo files.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou from ayon_core.lib import EnumDef, BoolDef @@ -25,7 +25,7 @@ class CreateBGEO(plugin.HoudiniCreator): instance = super(CreateBGEO, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py index a1104e5093..a25faf0e8e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_composite.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating composite sequences.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance, CreatorError +from ayon_core.pipeline import CreatorError import hou @@ -25,7 +25,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance = super(CreateCompositeSequence, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) filepath = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py index 994977de7d..d399aa5e15 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_hda.py @@ -1,9 +1,8 @@ # -*- coding: utf-8 -*- """Creator plugin for creating publishable Houdini Digital Assets.""" -from ayon_core.client import ( - get_asset_by_name, - get_subsets, -) +import ayon_api + +from ayon_core.pipeline import CreatorError from ayon_core.hosts.houdini.api import plugin import hou @@ -17,25 +16,25 @@ class CreateHDA(plugin.HoudiniCreator): icon = "gears" maintain_selection = False - def _check_existing(self, asset_name, product_name): - # type: (str) -> bool + def _check_existing(self, folder_path, product_name): + # type: (str, str) -> bool """Check if existing product name versions already exists.""" # Get all products of the current folder project_name = self.project_name - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["_id"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} ) - subset_docs = get_subsets( - project_name, asset_ids=[asset_doc["_id"]], fields=["name"] + product_entities = ayon_api.get_products( + project_name, folder_ids={folder_entity["id"]}, fields={"name"} ) existing_product_names_low = { - subset_doc["name"].lower() - for subset_doc in subset_docs + product_entity["name"].lower() + for product_entity in product_entities } return product_name.lower() in existing_product_names_low def 
create_instance_node(
-        self, asset_name, node_name, parent, node_type="geometry"
+        self, folder_path, node_name, parent, node_type="geometry"
     ):
         parent_node = hou.node("/obj")
@@ -54,7 +53,7 @@ class CreateHDA(plugin.HoudiniCreator):
         # if node type has not its definition, it is not user
         # created hda. We test if hda can be created from the node.
         if not to_hda.canCreateDigitalAsset():
-            raise plugin.OpenPypeCreatorError(
-                "cannot create hda from node {}".format(to_hda))
+            raise CreatorError(
+                "Cannot create HDA from node {}".format(to_hda))
 
         hda_node = to_hda.createDigitalAsset(
@@ -62,8 +61,8 @@ class CreateHDA(plugin.HoudiniCreator):
             hda_file_name="$HIP/{}.hda".format(node_name)
         )
         hda_node.layoutChildren()
-        elif self._check_existing(asset_name, node_name):
-            raise plugin.OpenPypeCreatorError(
-                ("product {} is already published with different HDA"
-                 "definition.").format(node_name))
+        elif self._check_existing(folder_path, node_name):
+            raise CreatorError(
+                ("product {} is already published with different HDA "
+                 "definition.").format(node_name))
         else:
@@ -79,7 +78,7 @@ class CreateHDA(plugin.HoudiniCreator):
         instance = super(CreateHDA, self).create(
             product_name,
             instance_data,
-            pre_create_data)  # type: plugin.CreatedInstance
+            pre_create_data)
 
         return instance
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py
index 9eb9d80cd3..e91ddbc0ac 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_karma_rop.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 """Creator plugin to create Karma ROP."""
 from ayon_core.hosts.houdini.api import plugin
-from ayon_core.pipeline import CreatedInstance
 from ayon_core.lib import BoolDef, EnumDef, NumberDef
 
@@ -25,7 +24,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
         instance = super(CreateKarmaROP, self).create(
             product_name,
             instance_data,
-            pre_create_data)  # type: CreatedInstance
+            pre_create_data)
 
         instance_node = hou.node(instance.get("instance_node"))
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py
index bb10f3893c..e0cf035c35 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_ifd.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
-"""Creator plugin for creating pointcache alembics."""
+"""Creator plugin for creating Mantra IFDs."""
 from ayon_core.hosts.houdini.api import plugin
-from ayon_core.pipeline import CreatedInstance
 from ayon_core.lib import BoolDef
 
@@ -22,7 +21,7 @@ class CreateMantraIFD(plugin.HoudiniCreator):
         instance = super(CreateMantraIFD, self).create(
             product_name,
             instance_data,
-            pre_create_data)  # type: CreatedInstance
+            pre_create_data)
 
         instance_node = hou.node(instance.get("instance_node"))
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py
index f15f49f463..64ecf428e9 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_mantra_rop.py
@@ -1,7 +1,6 @@
 # -*- coding: utf-8 -*-
 """Creator plugin to create Mantra ROP."""
 from ayon_core.hosts.houdini.api import plugin
-from ayon_core.pipeline import CreatedInstance
 from ayon_core.lib import EnumDef, BoolDef
 
@@ -28,7 +27,7 @@ class CreateMantraROP(plugin.HoudiniCreator):
         instance = super(CreateMantraROP, self).create(
             product_name,
             instance_data,
-            pre_create_data)  # type: CreatedInstance
+            pre_create_data)
 
         instance_node = hou.node(instance.get("instance_node"))
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py
index 3d6d657cf0..1cd239e929 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_redshift_rop.py
@@ -2,6 +2,7 @@
 """Creator plugin to create Redshift ROP."""
 import hou  # noqa
 
+from ayon_core.pipeline import CreatorError
 from ayon_core.hosts.houdini.api import plugin
 from ayon_core.lib import EnumDef, BoolDef
 
@@ -14,6 +15,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
     product_type = "redshift_rop"
     icon = "magic"
     ext = "exr"
+    multi_layered_mode = "No Multi-Layered EXR File"
 
     # Default to split export and render jobs
     split_render = True
@@ -42,7 +44,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
                 "Redshift_IPR", node_name=f"{basename}_IPR"
             )
         except hou.OperationFailed as e:
-            raise plugin.OpenPypeCreatorError(
+            raise CreatorError(
                 (
                     "Cannot create Redshift node. Is Redshift "
                     "installed and enabled?"
@@ -54,25 +56,36 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
         # Set the linked rop to the Redshift ROP
         ipr_rop.parm("linked_rop").set(instance_node.path())
 
-        ext = pre_create_data.get("image_format")
-        filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
-            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
-            product_name=product_name,
-            fmt="${aov}.$F4.{ext}".format(aov="AOV", ext=ext)
-        )
+        ext = pre_create_data.get("image_format")
+        multi_layered_mode = pre_create_data.get("multi_layered_mode")
 
         ext_format_index = {"exr": 0, "tif": 1, "jpg": 2, "png": 3}
+        multilayer_mode_index = {"No Multi-Layered EXR File": "1",
+                                 "Full Multi-Layered EXR File": "2"}
+
+        filepath = "{renders_dir}{product_name}/{product_name}.{fmt}".format(
+            renders_dir=hou.text.expandString("$HIP/pyblish/renders/"),
+            product_name=product_name,
+            fmt="$AOV.$F4.{ext}".format(ext=ext)
+        )
+
+        # Multipart EXRs only apply to the full multi-layered mode
+        multipart = multilayer_mode_index[multi_layered_mode] == "2"
 
         parms = {
             # Render frame range
             "trange": 1,
 
             # Redshift ROP settings
             "RS_outputFileNamePrefix": filepath,
-            "RS_outputMultilayerMode": "1",  # no multi-layered exr
             "RS_outputBeautyAOVSuffix": "beauty",
             "RS_outputFileFormat": ext_format_index[ext],
         }
 
+        if ext == "exr":
+            parms["RS_outputMultilayerMode"] = (
+                multilayer_mode_index[multi_layered_mode]
+            )
+            parms["RS_aovMultipart"] = multipart
 
         if self.selected_nodes:
             # set up the render camera from the selected node
@@ -110,6 +123,11 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
         image_format_enum = [
            "exr", "tif", "jpg", "png",
         ]
+        multi_layered_mode = [
+            "No Multi-Layered EXR File",
+            "Full Multi-Layered EXR File"
+        ]
 
         return attrs + [
             BoolDef("farm",
@@ -121,5 +139,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
             EnumDef("image_format",
                     image_format_enum,
                     default=self.ext,
-                    label="Image Format Options")
+                    label="Image Format Options"),
+            EnumDef("multi_layered_mode",
+                    multi_layered_mode,
+                    default=self.multi_layered_mode,
+                    label="Multi-Layered EXR")
         ]
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_review.py b/client/ayon_core/hosts/houdini/plugins/create/create_review.py
index 18f7ce498d..94dcf23181 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_review.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_review.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 """Creator plugin for creating openGL reviews."""
-from ayon_core.hosts.houdini.api import plugin
+from ayon_core.hosts.houdini.api import lib, plugin
 from ayon_core.lib import EnumDef, BoolDef, NumberDef
 
 import os
@@ -14,6 +14,13 @@ class CreateReview(plugin.HoudiniCreator):
     label = "Review"
     product_type = "review"
     icon = "video-camera"
+    review_color_space = ""
+
+    def apply_settings(self, project_settings):
+        super(CreateReview, self).apply_settings(project_settings)
+        color_settings = project_settings["houdini"]["imageio"]["workfile"]
+        if color_settings["enabled"]:
+            self.review_color_space = color_settings.get("review_color_space")
 
     def create(self, product_name, instance_data, pre_create_data):
 
@@ -85,10 +92,20 @@ class CreateReview(plugin.HoudiniCreator):
 
         instance_node.setParms(parms)
 
-        # Set OCIO Colorspace to the default output colorspace
+        # Set OCIO Colorspace to the default colorspace
         # if there's OCIO
         if os.getenv("OCIO"):
-            self.set_colorcorrect_to_default_view_space(instance_node)
+            # Fall back to a default value if self.review_color_space is
+            # an empty string, which happens when the imageio/workfile
+            # setting is disabled or the Review colorspace setting is empty.
+            if not self.review_color_space:
+                from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace  # noqa
+                self.review_color_space = get_default_display_view_colorspace()
+
+            lib.set_review_color_space(instance_node,
+                                       self.review_color_space,
+                                       self.log)
 
         to_lock = ["id", "productType"]
 
@@ -131,23 +148,3 @@ class CreateReview(plugin.HoudiniCreator):
                       minimum=0.0001,
                       decimals=3)
         ]
-
-    def set_colorcorrect_to_default_view_space(self,
-                                               instance_node):
-        """Set ociocolorspace to the default output space."""
-        from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace  # noqa
-
-        # set Color Correction parameter to OpenColorIO
-        instance_node.setParms({"colorcorrect": 2})
-
-        # Get default view space for ociocolorspace parm.
-        default_view_space = get_default_display_view_colorspace()
-        instance_node.setParms(
-            {"ociocolorspace": default_view_space}
-        )
-
-        self.log.debug(
-            "'OCIO Colorspace' parm on '{}' has been set to "
-            "the default view color space '{}'"
-            .format(instance_node, default_view_space)
-        )
diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py b/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py
index bc8a2507cd..3271107c6e 100644
--- a/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py
+++ b/client/ayon_core/hosts/houdini/plugins/create/create_staticmesh.py
@@ -88,16 +88,27 @@ class CreateStaticMesh(plugin.HoudiniCreator):
 
         return attrs + [createsubnetroot, vcformat, convert_units]
 
     def get_dynamic_data(
-        self, project_name, asset_doc, task_name, variant, host_name, instance
+        self,
+        project_name,
+        folder_entity,
+        task_entity,
+        variant,
+        host_name,
+        instance
     ):
         """
-        The default prodcut name templates for Unreal include {asset} and thus
+        The default product name templates for Unreal include {asset} and thus
         we should pass that along as dynamic data.
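+
+        Example:
+            With a product name template such as
+            "{product[type]}{asset}{variant}" (illustrative), the "asset"
+            key returned here resolves the {asset} token to the folder
+            name.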
""" dynamic_data = super(CreateStaticMesh, self).get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) - dynamic_data["asset"] = asset_doc["name"] + dynamic_data["asset"] = folder_entity["name"] return dynamic_data def get_selection(self): diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py index ee05639368..700f7eefd6 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usd.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usd.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USDs.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance import hou @@ -22,7 +21,7 @@ class CreateUSD(plugin.HoudiniCreator): instance = super(CreateUSD, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py index 0a5c8896a8..36197e349e 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_usdrender.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating USD renders.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance class CreateUSDRender(plugin.HoudiniCreator): @@ -23,7 +22,7 @@ class CreateUSDRender(plugin.HoudiniCreator): instance = super(CreateUSDRender, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py index 9ac7ebdff7..c34cd2b4b5 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Creator plugin for creating VDB Caches.""" from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance from ayon_core.lib import BoolDef import hou @@ -26,7 +25,7 @@ class CreateVDBCache(plugin.HoudiniCreator): instance = super(CreateVDBCache, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) file_path = "{}{}".format( diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py index 739796dc7c..5ed9e848a7 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_vray_rop.py @@ -3,7 +3,7 @@ import hou from ayon_core.hosts.houdini.api import plugin -from ayon_core.pipeline import CreatedInstance +from ayon_core.pipeline import CreatorError from ayon_core.lib import EnumDef, BoolDef @@ -31,7 +31,7 @@ class CreateVrayROP(plugin.HoudiniCreator): instance = super(CreateVrayROP, self).create( product_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) instance_node = hou.node(instance.get("instance_node")) @@ -42,7 +42,7 @@ class 
CreateVrayROP(plugin.HoudiniCreator): "vray", node_name=basename + "_IPR" ) except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( + raise CreatorError( "Cannot create Vray render node. " "Make sure Vray installed and enabled!" ) diff --git a/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py b/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py index 631ef6ce77..a958509e25 100644 --- a/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/houdini/plugins/create/create_workfile.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" +import ayon_api + from ayon_core.hosts.houdini.api import plugin from ayon_core.hosts.houdini.api.lib import read, imprint from ayon_core.hosts.houdini.api.pipeline import CONTEXT_CONTAINER from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_core.client import get_asset_by_name import hou @@ -26,26 +27,31 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ), None) project_name = self.project_name - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.host_name if current_instance is None: - current_instance_asset = None + current_folder_path = None else: - current_instance_asset = current_instance["folderPath"] + current_folder_path = current_instance["folderPath"] if current_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": variant, } @@ -53,8 +59,8 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): data.update( self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, current_instance) @@ -65,19 +71,24 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ) self._add_instance_to_context(current_instance) elif ( - current_instance_asset != asset_name + current_folder_path != folder_path or current_instance["task"] != task_name ): # Update instance context if is not the same - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) - current_instance["folderPath"] = asset_name + current_instance["folderPath"] = folder_path current_instance["task"] = task_name current_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py index dadb80469a..4cebd537bb 100644 --- a/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py +++ b/client/ayon_core/hosts/houdini/plugins/inventory/set_camera_resolution.py @@ -3,7 +3,7 @@ from ayon_core.hosts.houdini.api.lib import ( get_camera_from_container, set_camera_resolution ) -from 
ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity class SetCameraResolution(InventoryAction): @@ -19,8 +19,8 @@ class SetCameraResolution(InventoryAction): ) def process(self, containers): - asset_doc = get_current_project_asset() + folder_entity = get_current_folder_entity() for container in containers: node = container["node"] camera = get_camera_from_container(node) - set_camera_resolution(camera, asset_doc) + set_camera_resolution(camera, folder_entity) diff --git a/client/ayon_core/hosts/houdini/plugins/load/actions.py b/client/ayon_core/hosts/houdini/plugins/load/actions.py index 2cffa565b1..3e9cc35504 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/actions.py +++ b/client/ayon_core/hosts/houdini/plugins/load/actions.py @@ -8,14 +8,14 @@ from ayon_core.pipeline import load class SetFrameRangeLoader(load.LoaderPlugin): """Set frame range excluding pre- and post-handles""" - families = [ + product_types = { "animation", "camera", "pointcache", "vdbcache", "usd", - ] - representations = ["abc", "vdb", "usd"] + } + representations = {"abc", "vdb", "usd"} label = "Set frame range" order = 11 @@ -26,11 +26,10 @@ class SetFrameRangeLoader(load.LoaderPlugin): import hou - version = context["version"] - version_data = version.get("data", {}) + version_attributes = context["version"]["attrib"] - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") if start is None or end is None: print( @@ -46,14 +45,14 @@ class SetFrameRangeLoader(load.LoaderPlugin): class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): """Set frame range including pre- and post-handles""" - families = [ + product_types = { "animation", "camera", "pointcache", "vdbcache", "usd", - ] - representations = ["abc", "vdb", "usd"] + } + representations = {"abc", "vdb", "usd"} label = "Set frame range (with handles)" order = 12 @@ -64,11 +63,10 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): import hou - version = context["version"] - version_data = version.get("data", {}) + version_attributes = context["version"]["attrib"] - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") if start is None or end is None: print( @@ -78,8 +76,8 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): return # Include handles - start -= version_data.get("handleStart", 0) - end += version_data.get("handleEnd", 0) + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) hou.playbar.setFrameRange(start, end) hou.playbar.setPlaybackRange(start, end) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py index 6996b0d117..5f04781501 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic.py @@ -9,9 +9,10 @@ from ayon_core.hosts.houdini.api import pipeline class AbcLoader(load.LoaderPlugin): """Load Alembic""" - families = ["model", "animation", "pointcache", "gpuCache"] + product_types = {"model", "animation", "pointcache", "gpuCache"} label = "Load Alembic" - representations = ["abc"] + representations = {"*"} + extensions = {"abc"} order = -10 icon = "code-fork" color = "orange" @@ -28,7 +29,7 
@@ class AbcLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a new geo node @@ -44,33 +45,11 @@ class AbcLoader(load.LoaderPlugin): alembic = container.createNode("alembic", node_name=node_name) alembic.setParms({"fileName": file_path}) - # Add unpack node - unpack_name = "unpack_{}".format(name) - unpack = container.createNode("unpack", node_name=unpack_name) - unpack.setInput(0, alembic) - unpack.setParms({"transfer_attributes": "path"}) + # Position nodes nicely + container.moveToGoodPosition() + container.layoutChildren() - # Add normal to points - # Order of menu ['point', 'vertex', 'prim', 'detail'] - normal_name = "normal_{}".format(name) - normal_node = container.createNode("normal", node_name=normal_name) - normal_node.setParms({"type": 0}) - - normal_node.setInput(0, unpack) - - null = container.createNode("null", node_name="OUT".format(name)) - null.setInput(0, normal_node) - - # Ensure display flag is on the Alembic input node and not on the OUT - # node to optimize "debug" displaying in the viewport. - alembic.setDisplayFlag(True) - - # Set new position for unpack node else it gets cluttered - nodes = [container, alembic, unpack, normal_node, null] - for nr, node in enumerate(nodes): - node.setPosition([0, (0 - nr)]) - - self[:] = nodes + nodes = [container, alembic] return pipeline.containerise( node_name, @@ -81,8 +60,8 @@ class AbcLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] try: alembic_node = next( @@ -93,18 +72,18 @@ class AbcLoader(load.LoaderPlugin): return # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") alembic_node.setParms({"fileName": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": repre_entity["id"]}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py index cfe3b16ebb..a231bd9993 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_alembic_archive.py @@ -9,9 +9,10 @@ from ayon_core.hosts.houdini.api import pipeline class AbcArchiveLoader(load.LoaderPlugin): """Load Alembic as full geometry network hierarchy """ - families = ["model", "animation", "pointcache", "gpuCache"] + product_types = {"model", "animation", "pointcache", "gpuCache"} label = "Load Alembic as Archive" - representations = ["abc"] + representations = {"*"} + extensions = {"abc"} order = -5 icon = "code-fork" color = "orange" @@ -29,14 +30,14 @@ class AbcArchiveLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if 
namespace else name # Create an Alembic archive node node = obj.createNode("alembicarchive", node_name=node_name) node.moveToGoodPosition() - # TODO: add FPS of project / asset + # TODO: add FPS of project / folder node.setParms({"fileName": file_path, "channelRef": True}) @@ -55,17 +56,17 @@ class AbcArchiveLoader(load.LoaderPlugin): self.__class__.__name__, suffix="") - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") # Update attributes node.setParms({"fileName": file_path, - "representation": str(representation["_id"])}) + "representation": repre_entity["id"]}) # Rebuild node.parm("buildHierarchy").pressButton() @@ -75,5 +76,5 @@ class AbcArchiveLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py index 6fbe315adb..6e0922e305 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_ass.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_ass.py @@ -11,9 +11,9 @@ from ayon_core.hosts.houdini.api import pipeline class AssLoader(load.LoaderPlugin): """Load .ass with Arnold Procedural""" - families = ["ass"] + product_types = {"ass"} label = "Load Arnold Procedural" - representations = ["ass"] + representations = {"ass"} order = -10 icon = "code-fork" color = "orange" @@ -25,7 +25,7 @@ class AssLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a new geo node @@ -48,13 +48,14 @@ class AssLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): + def update(self, container, context): # Update the file path + repre_entity = context["representation"] procedural = container["node"] - procedural.setParms({"ar_filename": self.format_path(representation)}) + procedural.setParms({"ar_filename": self.format_path(repre_entity)}) # Update attribute - procedural.setParms({"representation": str(representation["_id"])}) + procedural.setParms({"representation": repre_entity["id"]}) def remove(self, container): node = container["node"] @@ -86,5 +87,5 @@ class AssLoader(load.LoaderPlugin): return os.path.normpath(path).replace("\\", "/") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py index afcf82562c..a318b71963 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_bgeo.py @@ -13,10 +13,10 @@ class BgeoLoader(load.LoaderPlugin): """Load bgeo files to Houdini.""" label = "Load bgeo" - families = ["model", "pointcache", "bgeo"] - representations = [ + product_types = {"model", "pointcache", "bgeo"} + representations = { "bgeo", "bgeosc", "bgeogz", - "bgeo.sc", 
"bgeo.gz", "bgeo.lzma", "bgeo.bz2"] + "bgeo.sc", "bgeo.gz", "bgeo.lzma", "bgeo.bz2"} order = -10 icon = "code-fork" color = "orange" @@ -29,7 +29,7 @@ class BgeoLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a new geo node @@ -82,8 +82,8 @@ class BgeoLoader(load.LoaderPlugin): return filename - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] try: file_node = next( @@ -94,18 +94,18 @@ class BgeoLoader(load.LoaderPlugin): return # Update the file path - file_path = get_representation_path(representation) - file_path = self.format_path(file_path, representation) + file_path = get_representation_path(repre_entity) + file_path = self.format_path(file_path, repre_entity) file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": repre_entity["id"]}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py index 11826fb30d..b7912f88f1 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_camera.py @@ -87,9 +87,9 @@ def transfer_non_default_values(src, dest, ignore=None): class CameraLoader(load.LoaderPlugin): """Load camera from an Alembic file""" - families = ["camera"] + product_types = {"camera"} label = "Load Camera (abc)" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" @@ -104,13 +104,13 @@ class CameraLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a archive node node = self.create_and_connect(obj, "alembicarchive", node_name) - # TODO: add FPS of project / asset + # TODO: add FPS of project / folder node.setParms({"fileName": file_path, "channelRef": True}) # Apply some magic @@ -122,7 +122,7 @@ class CameraLoader(load.LoaderPlugin): camera = get_camera_from_container(node) self._match_maya_render_mask(camera) - set_camera_resolution(camera, asset_doc=context["asset"]) + set_camera_resolution(camera, folder_entity=context["folder"]) self[:] = nodes return pipeline.containerise(node_name, @@ -132,17 +132,17 @@ class CameraLoader(load.LoaderPlugin): self.__class__.__name__, suffix="") - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") # Update attributes node.setParms({"fileName": file_path, - "representation": str(representation["_id"])}) + "representation": repre_entity["id"]}) # Store the cam temporarily next to the Alembic Archive # so that we can 
preserve parm values the user set on it @@ -167,6 +169,9 @@ class CameraLoader(load.LoaderPlugin): temp_camera.destroy() + def switch(self, container, context): + self.update(container, context) + def remove(self, container): node = container["node"] @@ -195,7 +198,6 @@ class CameraLoader(load.LoaderPlugin): def _match_maya_render_mask(self, camera): """Workaround to match Maya render mask in Houdini""" - # print("Setting match maya render mask ") parm = camera.parm("aperture") expression = parm.expression() expression = expression.replace("return ", "aperture = ") diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py index c750874719..398019a3bd 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_fbx.py @@ -16,8 +16,8 @@ class FbxLoader(load.LoaderPlugin): order = -10 - families = ["*"] - representations = ["*"] + product_types = {"*"} + representations = {"*"} extensions = {"fbx"} def load(self, context, name=None, namespace=None, data=None): @@ -47,8 +47,8 @@ class FbxLoader(load.LoaderPlugin): return containerised_nodes - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] try: file_node = next( @@ -59,27 +59,27 @@ class FbxLoader(load.LoaderPlugin): return # Update the file path from representation - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": repre_entity["id"]}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def get_node_name(self, context, name=None, namespace=None): """Define node name.""" if not namespace: - namespace = context["asset"]["name"] + namespace = context["folder"]["name"] if namespace: node_name = "{}_{}".format(namespace, name) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py new file mode 100644 index 0000000000..d189a027fd --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/load/load_filepath.py @@ -0,0 +1,129 @@ +import os +import re + +from ayon_core.pipeline import load +from ayon_core.hosts.houdini.api import pipeline + +import hou + + +class FilePathLoader(load.LoaderPlugin): + """Load a managed filepath to a null node. + + This is useful if there is no existing loader for a particular workflow + yet. A Houdini artist can load it with this generic filepath loader and + then reference the relevant Houdini parm to use the exact value. The benefit + is that this filepath will be managed and can be updated as usual.
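+ + For example, another parm in the scene could reference the loaded node + with a channel reference such as `chs("/obj/<node_name>/filepath")` (the + node name here is hypothetical), so it always resolves to the managed, + updatable path.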
+ + """ + + label = "Load filepath to node" + order = 9 + icon = "link" + color = "white" + product_types = {"*"} + representations = {"*"} + + def load(self, context, name=None, namespace=None, data=None): + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["folder"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a null node + container = obj.createNode("null", node_name=node_name) + + # Destroy any children + for node in container.children(): + node.destroy() + + # Add filepath attribute, set value as default value + filepath = self.format_path( + path=self.filepath_from_context(context), + representation=context["representation"] + ) + parm_template_group = container.parmTemplateGroup() + attr_folder = hou.FolderParmTemplate("attributes_folder", "Attributes") + parm = hou.StringParmTemplate(name="filepath", + label="Filepath", + num_components=1, + default_value=(filepath,)) + attr_folder.addParmTemplate(parm) + parm_template_group.append(attr_folder) + + # Hide some default labels + for folder_label in ["Transform", "Render", "Misc", "Redshift OBJ"]: + folder = parm_template_group.findFolder(folder_label) + if not folder: + continue + parm_template_group.hideFolder(folder_label, True) + + container.setParmTemplateGroup(parm_template_group) + + container.setDisplayFlag(False) + container.setSelectableInViewport(False) + container.useXray(False) + + nodes = [container] + + self[:] = nodes + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + def update(self, container, context): + + # Update the file path + representation_entity = context["representation"] + file_path = self.format_path( + path=self.filepath_from_context(context), + representation=representation_entity + ) + + node = container["node"] + node.setParms({ + "filepath": file_path, + "representation": str(representation_entity["id"]) + }) + + # Update the parameter default value (cosmetics) + parm_template_group = node.parmTemplateGroup() + parm = parm_template_group.find("filepath") + parm.setDefaultValue((file_path,)) + parm_template_group.replace(parm_template_group.find("filepath"), + parm) + node.setParmTemplateGroup(parm_template_group) + + def switch(self, container, context): + self.update(container, context) + + def remove(self, container): + + node = container["node"] + node.destroy() + + @staticmethod + def format_path(path: str, representation: dict) -> str: + """Format file path for sequence with $F.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) + + # The path is either a single file or sequence in a folder. + frame = representation["context"].get("frame") + if frame is not None: + # Substitute frame number in sequence with $F with padding + ext = representation.get("ext", representation["name"]) + token = "$F{}".format(len(frame)) # e.g. 
$F4 + pattern = r"\.(\d+)\.{ext}$".format(ext=re.escape(ext)) + path = re.sub(pattern, ".{}.{}".format(token, ext), path) + + return os.path.normpath(path).replace("\\", "/") diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py index 288152f2bd..10fc03be03 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_hda.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_hda.py @@ -10,9 +10,9 @@ from ayon_core.hosts.houdini.api import pipeline class HdaLoader(load.LoaderPlugin): """Load Houdini Digital Asset file.""" - families = ["hda"] + product_types = {"hda"} label = "Load Hda" - representations = ["hda"] + representations = {"hda"} order = -10 icon = "code-fork" color = "orange" @@ -30,7 +30,7 @@ class HdaLoader(load.LoaderPlugin): # Create a unique name counter = 1 - namespace = namespace or context["asset"]["name"] + namespace = namespace or context["folder"]["name"] formatted = "{}_{}".format(namespace, name) if namespace else name node_name = "{0}_{1:03d}".format(formatted, counter) @@ -48,11 +48,12 @@ class HdaLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): + def update(self, container, context): import hou + repre_entity = context["representation"] hda_node = container["node"] - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") hou.hda.installFile(file_path) defs = hda_node.type().allInstalledDefinitions() @@ -60,7 +61,7 @@ class HdaLoader(load.LoaderPlugin): new = def_paths.index(file_path) defs[new].setIsPreferred(True) hda_node.setParms({ - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) def remove(self, container): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_image.py b/client/ayon_core/hosts/houdini/plugins/load/load_image.py index 20fe2f87ca..dfbd3c11eb 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_image.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_image.py @@ -1,4 +1,5 @@ import os +import re from ayon_core.pipeline import ( load, @@ -44,9 +45,16 @@ def get_image_avalon_container(): class ImageLoader(load.LoaderPlugin): """Load images into COP2""" - families = ["imagesequence"] + product_types = { + "imagesequence", + "review", + "render", + "plate", + "image", + "online", + } label = "Load Image (COP2)" - representations = ["*"] + representations = {"*"} order = -10 icon = "code-fork" @@ -55,22 +63,23 @@ class ImageLoader(load.LoaderPlugin): def load(self, context, name=None, namespace=None, data=None): # Format file name, Houdini only wants forward slashes - file_path = self.filepath_from_context(context) - file_path = os.path.normpath(file_path) - file_path = file_path.replace("\\", "/") - file_path = self._get_file_sequence(file_path) + path = self.filepath_from_context(context) + path = self.format_path(path, representation=context["representation"]) # Get the root node parent = get_image_avalon_container() # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name node = parent.createNode("file", node_name=node_name) node.moveToGoodPosition() - node.setParms({"filename1": file_path}) + parms = {"filename1": path} + parms.update(self.get_colorspace_parms(context["representation"])) + + node.setParms(parms) # 
Imprint it manually data = { @@ -79,7 +88,7 @@ class ImageLoader(load.LoaderPlugin): "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } # todo: add folder="Avalon" @@ -87,22 +96,23 @@ class ImageLoader(load.LoaderPlugin): return node - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) - file_path = file_path.replace("\\", "/") - file_path = self._get_file_sequence(file_path) + file_path = get_representation_path(repre_entity) + file_path = self.format_path(file_path, repre_entity) + + parms = { + "filename1": file_path, + "representation": repre_entity["id"], + } + + parms.update(self.get_colorspace_parms(repre_entity)) # Update attributes - node.setParms( - { - "filename1": file_path, - "representation": str(representation["_id"]), - } - ) + node.setParms(parms) def remove(self, container): @@ -119,14 +129,58 @@ class ImageLoader(load.LoaderPlugin): if not parent.children(): parent.destroy() - def _get_file_sequence(self, file_path): - root = os.path.dirname(file_path) - files = sorted(os.listdir(root)) + @staticmethod + def format_path(path, representation): + """Format file path correctly for single image or sequence.""" + if not os.path.exists(path): + raise RuntimeError("Path does not exist: %s" % path) - first_fname = files[0] - prefix, padding, suffix = first_fname.rsplit(".", 2) - fname = ".".join([prefix, "$F{}".format(len(padding)), suffix]) - return os.path.join(root, fname).replace("\\", "/") + ext = os.path.splitext(path)[-1] + + is_sequence = bool(representation["context"].get("frame")) + # The path is either a single file or sequence in a folder. + if not is_sequence: + filename = path + else: + filename = re.sub(r"(.*)\.(\d+){}$".format(re.escape(ext)), + "\\1.$F4{}".format(ext), + path) + + filename = os.path.join(path, filename) + + filename = os.path.normpath(filename) + filename = filename.replace("\\", "/") + + return filename + + def get_colorspace_parms(self, representation: dict) -> dict: + """Return the color space parameters. + + Returns the values for the colorspace parameters on the node if there + is colorspace data on the representation. + + Arguments: + representation (dict): The representation entity. + + Returns: + dict: Parm to value mapping if colorspace data is defined. 
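+ For example (the colorspace name is only an illustration): + {"colorspace": 3, "ocio_space": "ACEScg"}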
+ + """ + # Using OCIO colorspace on COP2 File node is only supported in Hou 20+ + major, _, _ = hou.applicationVersion() + if major < 20: + return {} + + data = representation.get("data", {}).get("colorspaceData", {}) + if not data: + return {} + + colorspace = data["colorspace"] + if colorspace: + return { + "colorspace": 3, # Use OpenColorIO + "ocio_space": colorspace + } def switch(self, container, representation): self.update(container, representation) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py index dd6e78b3bc..f09856a970 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_redshift_proxy.py @@ -13,9 +13,9 @@ import hou class RedshiftProxyLoader(load.LoaderPlugin): """Load Redshift Proxy""" - families = ["redshiftproxy"] + product_types = {"redshiftproxy"} label = "Load Redshift Proxy" - representations = ["rs"] + representations = {"rs"} order = -10 icon = "code-fork" color = "orange" @@ -26,7 +26,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a new geo node @@ -72,19 +72,19 @@ class RedshiftProxyLoader(load.LoaderPlugin): suffix="", ) - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) node = container["node"] node.setParms({ "RS_objprop_proxy_file": self.format_path( - file_path, representation) + file_path, repre_entity) }) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": repre_entity["id"]}) def remove(self, container): diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py index 2c37c24884..4e6954c531 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_layer.py @@ -9,12 +9,12 @@ from ayon_core.hosts.houdini.api import lib class USDSublayerLoader(load.LoaderPlugin): """Sublayer USD file in Solaris""" - families = [ + product_types = { "usd", "usdCamera", - ] + } label = "Sublayer USD" - representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + representations = {"usd", "usda", "usdlc", "usdnc", "abc"} order = 1 icon = "code-fork" @@ -34,7 +34,7 @@ class USDSublayerLoader(load.LoaderPlugin): stage = hou.node("/stage") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create USD reference @@ -49,7 +49,7 @@ class USDSublayerLoader(load.LoaderPlugin): "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } # todo: add folder="Avalon" @@ -57,19 +57,19 @@ class USDSublayerLoader(load.LoaderPlugin): return container - def update(self, container, representation): - + def update(self, container, context): 
+ repre_entity = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") # Update attributes node.setParms( { "filepath1": file_path, - "representation": str(representation["_id"]), + "representation": repre_entity["id"], } ) @@ -81,5 +81,5 @@ class USDSublayerLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py index 9396f00cce..7e82a6abd0 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_reference.py @@ -9,12 +9,12 @@ from ayon_core.hosts.houdini.api import lib class USDReferenceLoader(load.LoaderPlugin): """Reference USD file in Solaris""" - families = [ + product_types = { "usd", "usdCamera", - ] + } label = "Reference USD" - representations = ["usd", "usda", "usdlc", "usdnc", "abc"] + representations = {"usd", "usda", "usdlc", "usdnc", "abc"} order = -8 icon = "code-fork" @@ -34,7 +34,7 @@ class USDReferenceLoader(load.LoaderPlugin): stage = hou.node("/stage") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create USD reference @@ -49,7 +49,7 @@ class USDReferenceLoader(load.LoaderPlugin): "name": node_name, "namespace": namespace, "loader": str(self.__class__.__name__), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], } # todo: add folder="Avalon" @@ -57,19 +57,19 @@ class USDReferenceLoader(load.LoaderPlugin): return container - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] # Update the file path - file_path = get_representation_path(representation) + file_path = get_representation_path(repre_entity) file_path = file_path.replace("\\", "/") # Update attributes node.setParms( { "filepath1": file_path, - "representation": str(representation["_id"]), + "representation": repre_entity["id"], } ) @@ -81,5 +81,5 @@ class USDReferenceLoader(load.LoaderPlugin): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py new file mode 100644 index 0000000000..506f6140bf --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/load/load_usd_sop.py @@ -0,0 +1,77 @@ +import os + +from ayon_core.pipeline import load +from ayon_core.hosts.houdini.api import pipeline + + +class SopUsdImportLoader(load.LoaderPlugin): + """Load USD to SOPs via `usdimport`""" + + label = "Load USD to SOPs" + product_types = {"*"} + representations = {"usd"} + order = -6 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + import hou + + # Format file name, Houdini only wants forward 
slashes + file_path = self.filepath_from_context(context) + file_path = os.path.normpath(file_path) + file_path = file_path.replace("\\", "/") + + # Get the root node + obj = hou.node("/obj") + + # Define node name + namespace = namespace if namespace else context["folder"]["name"] + node_name = "{}_{}".format(namespace, name) if namespace else name + + # Create a new geo node + container = obj.createNode("geo", node_name=node_name) + + # Create a usdimport node + usdimport = container.createNode("usdimport", node_name=node_name) + usdimport.setParms({"filepath1": file_path}) + + # Keep track of the created nodes for the container + nodes = [container, usdimport] + + return pipeline.containerise( + node_name, + namespace, + nodes, + context, + self.__class__.__name__, + suffix="", + ) + + def update(self, container, context): + + node = container["node"] + try: + usdimport_node = next( + n for n in node.children() if n.type().name() == "usdimport" + ) + except StopIteration: + self.log.error("Could not find node of type `usdimport`") + return + + # Update the file path + file_path = self.filepath_from_context(context) + file_path = file_path.replace("\\", "/") + + usdimport_node.setParms({"filepath1": file_path}) + + # Update attribute + node.setParms({"representation": context["representation"]["id"]}) + + def remove(self, container): + + node = container["node"] + node.destroy() + + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py index c3e374ee8d..0008f0d5f8 100644 --- a/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py +++ b/client/ayon_core/hosts/houdini/plugins/load/load_vdb.py @@ -11,9 +11,9 @@ from ayon_core.hosts.houdini.api import pipeline class VdbLoader(load.LoaderPlugin): """Load VDB""" - families = ["vdbcache"] + product_types = {"vdbcache"} label = "Load VDB" - representations = ["vdb"] + representations = {"vdb"} order = -10 icon = "code-fork" color = "orange" @@ -26,7 +26,7 @@ class VdbLoader(load.LoaderPlugin): obj = hou.node("/obj") # Define node name - namespace = namespace if namespace else context["asset"]["name"] + namespace = namespace if namespace else context["folder"]["name"] node_name = "{}_{}".format(namespace, name) if namespace else name # Create a new geo node @@ -79,8 +79,8 @@ class VdbLoader(load.LoaderPlugin): return filename - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] node = container["node"] try: file_node = next( @@ -91,18 +91,18 @@ class VdbLoader(load.LoaderPlugin): return # Update the file path - file_path = get_representation_path(representation) - file_path = self.format_path(file_path, representation) + file_path = get_representation_path(repre_entity) + file_path = self.format_path(file_path, repre_entity) file_node.setParms({"file": file_path}) # Update attribute - node.setParms({"representation": str(representation["_id"])}) + node.setParms({"representation": repre_entity["id"]}) def remove(self, container): node = container["node"] node.destroy() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) diff --git a/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py b/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py index 2f86f23b68..0158a6b963 100644 ---
a/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py +++ b/client/ayon_core/hosts/houdini/plugins/load/show_usdview.py @@ -10,8 +10,8 @@ class ShowInUsdview(load.LoaderPlugin): """Open USD file in usdview""" label = "Show in usdview" - representations = ["*"] - families = ["*"] + representations = {"*"} + product_types = {"*"} extensions = {"usd", "usda", "usdlc", "usdnc", "abc"} order = 15 diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_asset_handles.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_asset_handles.py index 6b62ea09d4..943a29952e 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_asset_handles.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_asset_handles.py @@ -8,7 +8,7 @@ from ayon_core.pipeline import AYONPyblishPluginMixin class CollectAssetHandles(pyblish.api.InstancePlugin, AYONPyblishPluginMixin): - """Apply asset handles. + """Apply folder handles. If instance does not have: - frameStart @@ -19,7 +19,7 @@ class CollectAssetHandles(pyblish.api.InstancePlugin, - frameStartHandle - frameEndHandle - Then we will retrieve the asset's handles to compute + Then we will retrieve the folder's handles to compute the exclusive frame range and actual handle ranges. """ @@ -29,7 +29,7 @@ class CollectAssetHandles(pyblish.api.InstancePlugin, # this plugin runs after CollectAnatomyInstanceData order = pyblish.api.CollectorOrder + 0.499 - label = "Collect Asset Handles" + label = "Collect Folder Handles" use_asset_handles = True def process(self, instance): @@ -52,9 +52,9 @@ class CollectAssetHandles(pyblish.api.InstancePlugin, attr_values = self.get_attr_values_from_data(instance.data) if attr_values.get("use_handles", self.use_asset_handles): - asset_data = instance.data["assetEntity"]["data"] - handle_start = asset_data.get("handleStart", 0) - handle_end = asset_data.get("handleEnd", 0) + folder_attributes = instance.data["folderEntity"]["attrib"] + handle_start = folder_attributes.get("handleStart", 0) + handle_end = folder_attributes.get("handleEnd", 0) else: handle_start = 0 handle_end = 0 @@ -118,7 +118,7 @@ class CollectAssetHandles(pyblish.api.InstancePlugin, BoolDef("use_handles", tooltip="Disable this if you want the publisher to" " ignore start and end handles specified in the" - " asset data for this publish instance", + " folder attributes for this publish instance", default=cls.use_asset_handles, label="Use asset handles") ] diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py index 7d7fabb315..6cf6bbf430 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_inputs.py @@ -1,9 +1,21 @@ +from collections import deque + import pyblish.api from ayon_core.pipeline import registered_host -def collect_input_containers(nodes): +def get_container_members(container): + node = container["node"] + # Usually the loaded containers don't have any complex references + # and the contained children should be all we need. So we disregard + # checking for .references() on the nodes. + members = set(node.allSubChildren()) + members.add(node) # include the node itself + return members + + +def collect_input_containers(containers, nodes): """Collect containers that contain any of the node in `nodes`. 
This will return any loaded Avalon container that contains at least one of @@ -11,30 +23,13 @@ def collect_input_containers(nodes): there are member nodes of that container. Returns: - list: Input avalon containers + list: Loaded containers that contain the `nodes` """ - - # Lookup by node ids - lookup = frozenset(nodes) - - containers = [] - host = registered_host() - for container in host.ls(): - - node = container["node"] - - # Usually the loaded containers don't have any complex references - # and the contained children should be all we need. So we disregard - # checking for .references() on the nodes. - members = set(node.allSubChildren()) - members.add(node) # include the node itself - - # If there's an intersection - if not lookup.isdisjoint(members): - containers.append(container) - - return containers + # Assume the containers have collected their cached '_members' data + # in the collector. + return [container for container in containers + if any(node in container["_members"] for node in nodes)] def iter_upstream(node): @@ -54,7 +49,7 @@ def iter_upstream(node): ) # Initialize process queue with the node's ancestors itself - queue = list(upstream) + queue = deque(upstream) collected = set(upstream) # Traverse upstream references for all nodes and yield them as we @@ -72,6 +67,10 @@ def iter_upstream(node): # Include the references' ancestors that have not been collected yet. for reference in references: + if reference in collected: + # Might have been collected in previous iteration + continue + ancestors = reference.inputAncestors( include_ref_inputs=True, follow_subnets=True ) @@ -108,13 +107,32 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): ) return - # Collect all upstream parents - nodes = list(iter_upstream(output)) - nodes.append(output) + # For large scenes the querying of "host.ls()" can be relatively slow + # e.g. up to a second. Many instances calling it easily slows this + # down. As such, we cache it so we trigger it only once. 
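+ # Each cached container also gets its member nodes embedded under a + # "_members" key so the membership lookups below stay cheap.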
+ # todo: Instead of hidden cache make "CollectContainers" plug-in + cache_key = "__cache_containers" + scene_containers = instance.context.data.get(cache_key, None) + if scene_containers is None: + # Query the scenes' containers if there's no cache yet + host = registered_host() + scene_containers = list(host.ls()) + for container in scene_containers: + # Embed the members into the container dictionary + container_members = set(get_container_members(container)) + container["_members"] = container_members + instance.context.data[cache_key] = scene_containers - # Collect containers for the given set of nodes - containers = collect_input_containers(nodes) + inputs = [] + if scene_containers: + # Collect all upstream parents + nodes = list(iter_upstream(output)) + nodes.append(output) + + # Collect containers for the given set of nodes + containers = collect_input_containers(scene_containers, nodes) + + inputs = [c["representation"] for c in containers] - inputs = [c["representation"] for c in containers] instance.data["inputRepresentations"] = inputs self.log.debug("Collected inputs: %s" % inputs) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py deleted file mode 100644 index edfa78e4d9..0000000000 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances.py +++ /dev/null @@ -1,100 +0,0 @@ -import hou - -import pyblish.api - -from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID -from ayon_core.hosts.houdini.api import lib - - -class CollectInstances(pyblish.api.ContextPlugin): - """Gather instances by all node in out graph and pre-defined attributes - - This collector takes into account assets that are associated with - an specific node and marked with a unique identifier; - - Identifier: - id (str): "ayon.create.instance" - - Specific node: - The specific node is important because it dictates in which way the - product is being exported. - - alembic: will export Alembic file which supports cascading attributes - like 'cbId' and 'path' - geometry: Can export a wide range of file types, default out - - """ - - order = pyblish.api.CollectorOrder - 0.01 - label = "Collect Instances" - hosts = ["houdini"] - - def process(self, context): - - nodes = hou.node("/out").children() - nodes += hou.node("/obj").children() - - # Include instances in USD stage only when it exists so it - # remains backwards compatible with version before houdini 18 - stage = hou.node("/stage") - if stage: - nodes += stage.recursiveGlob("*", filter=hou.nodeTypeFilter.Rop) - - for node in nodes: - - if not node.parm("id"): - continue - - if node.evalParm("id") not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - # instance was created by new creator code, skip it as - # it is already collected. - if node.parm("creator_identifier"): - continue - - has_family = node.evalParm("family") - assert has_family, "'%s' is missing 'family'" % node.name() - - self.log.info( - "Processing legacy instance node {}".format(node.path()) - ) - - data = lib.read(node) - # Check bypass state and reverse - if hasattr(node, "isBypassed"): - data.update({"active": not node.isBypassed()}) - - # temporarily translation of `active` to `publish` till issue has - # been resolved. - # https://github.com/pyblish/pyblish-base/issues/307 - if "active" in data: - data["publish"] = data["active"] - - # Create nice name if the instance has a frame range. 
- label = data.get("name", node.name()) - label += " (%s)" % data["folderPath"] # include folder in name - - instance = context.create_instance(label) - - # Include `families` using `family` data - product_type = data["family"] - data["productType"] = product_type - instance.data["families"] = [product_type] - - instance[:] = [node] - instance.data["instance_node"] = node.path() - instance.data.update(data) - - def sort_by_family(instance): - """Sort by family""" - return instance.data.get( - "families", instance.data.get("productType") - ) - - # Sort/grouped by family (preserving local index) - context[:] = sorted(context, key=sort_by_family) - - return context diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py index 38d6ec733d..9377a9fcd0 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py @@ -15,10 +15,10 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): As opposed to storing `ayon.create.instance` as id on the node we store `pyblish.avalon.usdlayered`. - Additionally this instance has no need for storing family, asset, product - or name on the nodes. Instead all information is retrieved solely from - the output filepath, which is an Avalon URI: - avalon://{asset}/{product}.{representation} + Additionally this instance has no need for storing folder, product type, + product name or name on the nodes. Instead all information is retrieved + solely from the output filepath, which is an Avalon URI: + avalon://{folder}/{product}.{representation} Each final ROP node is considered a dependency for any of the Configured Save Path layers it sets along the way. 
As such, the instances shown in @@ -89,7 +89,7 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): # For now group ALL of them into USD Layer product group # Allow this product to be grouped into a USD Layer on creation - data["subsetGroup"] = "USD Layer" + data["productGroup"] = "USD Layer" instances = list() dependencies = [] @@ -142,9 +142,9 @@ class CollectInstancesUsdLayered(pyblish.api.ContextPlugin): self.log.warning("Non Avalon URI Layer Path: %s" % save_path) return {} - # Collect asset + product from URI - name = "{product[name]} ({asset})".format(**uri_data) - fname = "{asset}_{product[name]}.{ext}".format(**uri_data) + # Collect folder + product from URI + name = "{product[name]} ({folder[path]})".format(**uri_data) + fname = "{folder[path]}_{product[name]}.{ext}".format(**uri_data) data = dict(uri_data) data["usdSavePath"] = save_path diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py index 85100bc2c6..78651b0c69 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_karma_rop.py @@ -41,23 +41,23 @@ class CollectKarmaROPRenderProducts(pyblish.api.InstancePlugin): instance.data["chunkSize"] = chunk_size self.log.debug("Chunk Size: %s" % chunk_size) - default_prefix = evalParmNoFrame(rop, "picture") - render_products = [] + default_prefix = evalParmNoFrame(rop, "picture") + render_products = [] - # Default beauty AOV - beauty_product = self.get_render_product_name( - prefix=default_prefix, suffix=None - ) - render_products.append(beauty_product) + # Default beauty AOV + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=None + ) + render_products.append(beauty_product) - files_by_aov = { - "beauty": self.generate_expected_files(instance, - beauty_product) - } + files_by_aov = { + "beauty": self.generate_expected_files(instance, + beauty_product) + } - filenames = list(render_products) - instance.data["files"] = filenames - instance.data["renderProducts"] = colorspace.ARenderProduct() + filenames = list(render_products) + instance.data["files"] = filenames + instance.data["renderProducts"] = colorspace.ARenderProduct() for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py index d46476c2ce..df9acc4b61 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_mantra_rop.py @@ -41,57 +41,57 @@ class CollectMantraROPRenderProducts(pyblish.api.InstancePlugin): instance.data["chunkSize"] = chunk_size self.log.debug("Chunk Size: %s" % chunk_size) - default_prefix = evalParmNoFrame(rop, "vm_picture") - render_products = [] + default_prefix = evalParmNoFrame(rop, "vm_picture") + render_products = [] - # Store whether we are splitting the render job (export + render) - split_render = bool(rop.parm("soho_outputmode").eval()) - instance.data["splitRender"] = split_render - export_prefix = None - export_products = [] - if split_render: - export_prefix = evalParmNoFrame( - rop, "soho_diskfile", pad_character="0" - ) - beauty_export_product = self.get_render_product_name( - prefix=export_prefix, - suffix=None) - export_products.append(beauty_export_product) - self.log.debug( - "Found export product: 
{}".format(beauty_export_product) - ) - instance.data["ifdFile"] = beauty_export_product - instance.data["exportFiles"] = list(export_products) - - # Default beauty AOV - beauty_product = self.get_render_product_name( - prefix=default_prefix, suffix=None + # Store whether we are splitting the render job (export + render) + split_render = bool(rop.parm("soho_outputmode").eval()) + instance.data["splitRender"] = split_render + export_prefix = None + export_products = [] + if split_render: + export_prefix = evalParmNoFrame( + rop, "soho_diskfile", pad_character="0" ) - render_products.append(beauty_product) + beauty_export_product = self.get_render_product_name( + prefix=export_prefix, + suffix=None) + export_products.append(beauty_export_product) + self.log.debug( + "Found export product: {}".format(beauty_export_product) + ) + instance.data["ifdFile"] = beauty_export_product + instance.data["exportFiles"] = list(export_products) - files_by_aov = { - "beauty": self.generate_expected_files(instance, - beauty_product) - } + # Default beauty AOV + beauty_product = self.get_render_product_name( + prefix=default_prefix, suffix=None + ) + render_products.append(beauty_product) - aov_numbers = rop.evalParm("vm_numaux") - if aov_numbers > 0: - # get the filenames of the AOVs - for i in range(1, aov_numbers + 1): - var = rop.evalParm("vm_variable_plane%d" % i) - if var: - aov_name = "vm_filename_plane%d" % i - aov_boolean = "vm_usefile_plane%d" % i - aov_enabled = rop.evalParm(aov_boolean) - has_aov_path = rop.evalParm(aov_name) - if has_aov_path and aov_enabled == 1: - aov_prefix = evalParmNoFrame(rop, aov_name) - aov_product = self.get_render_product_name( - prefix=aov_prefix, suffix=None - ) - render_products.append(aov_product) + files_by_aov = { + "beauty": self.generate_expected_files(instance, + beauty_product) + } - files_by_aov[var] = self.generate_expected_files(instance, aov_product) # noqa + aov_numbers = rop.evalParm("vm_numaux") + if aov_numbers > 0: + # get the filenames of the AOVs + for i in range(1, aov_numbers + 1): + var = rop.evalParm("vm_variable_plane%d" % i) + if var: + aov_name = "vm_filename_plane%d" % i + aov_boolean = "vm_usefile_plane%d" % i + aov_enabled = rop.evalParm(aov_boolean) + has_aov_path = rop.evalParm(aov_name) + if has_aov_path and aov_enabled == 1: + aov_prefix = evalParmNoFrame(rop, aov_name) + aov_product = self.get_render_product_name( + prefix=aov_prefix, suffix=None + ) + render_products.append(aov_product) + + files_by_aov[var] = self.generate_expected_files(instance, aov_product) # noqa for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py index 67cc080ead..55a55bb12a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -60,20 +60,30 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): instance.data["ifdFile"] = beauty_export_product instance.data["exportFiles"] = list(export_products) - # Default beauty AOV + full_exr_mode = (rop.evalParm("RS_outputMultilayerMode") == "2") + if full_exr_mode: + # Ignore beauty suffix if full mode is enabled + # As this is what the rop does. 
+ beauty_suffix = "" + + # Default beauty/main layer AOV beauty_product = self.get_render_product_name( prefix=default_prefix, suffix=beauty_suffix ) render_products = [beauty_product] files_by_aov = { - "_": self.generate_expected_files(instance, - beauty_product)} + beauty_suffix: self.generate_expected_files(instance, + beauty_product) + } aovs_rop = rop.parm("RS_aovGetFromNode").evalAsNode() if aovs_rop: rop = aovs_rop - num_aovs = rop.evalParm("RS_aov") + num_aovs = 0 + if not rop.evalParm('RS_aovAllAOVsDisabled'): + num_aovs = rop.evalParm("RS_aov") + for index in range(num_aovs): i = index + 1 @@ -86,11 +96,14 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): if not aov_prefix: aov_prefix = default_prefix - aov_product = self.get_render_product_name(aov_prefix, aov_suffix) - render_products.append(aov_product) + if rop.parm(f"RS_aovID_{i}").evalAsString() == "CRYPTOMATTE" or \ + not full_exr_mode: + + aov_product = self.get_render_product_name(aov_prefix, aov_suffix) + render_products.append(aov_product) - files_by_aov[aov_suffix] = self.generate_expected_files(instance, - aov_product) # noqa + files_by_aov[aov_suffix] = self.generate_expected_files(instance, + aov_product) # noqa for product in render_products: self.log.debug("Found render product: %s" % product) @@ -118,7 +131,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): # When AOV is explicitly defined in prefix we just swap it out # directly with the AOV suffix to embed it. - # Note: ${AOV} seems to be evaluated in the parameter as %AOV% + # Note: '$AOV' seems to be evaluated in the parameter as '%AOV%' has_aov_in_prefix = "%AOV%" in prefix if has_aov_in_prefix: # It seems that when some special separator characters are present diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index 24ac9f22c3..cd82f1679a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,11 +1,7 @@ import pyblish.api +import ayon_api -from ayon_core.client import ( - get_subset_by_name, - get_asset_by_name, - get_asset_name_identifier, -) -from ayon_core.pipeline import usdlib +from ayon_core.pipeline import usdlib, KnownPublishError class CollectUsdBootstrap(pyblish.api.InstancePlugin): @@ -54,9 +50,13 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Add bootstrap for: %s" % bootstrap) project_name = instance.context.data["projectName"] - asset_name = instance.data["folderPath"] - asset_doc = get_asset_by_name(project_name, asset_name) - assert asset_doc, "Asset must exist: %s" % asset_name + folder_path = instance.data["folderPath"] + folder_name = folder_path.rsplit("/", 1)[-1] + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if not folder_entity: + raise KnownPublishError( + "Folder '{}' does not exist".format(folder_path) + ) # Check which are not about to be created and don't exist yet required = {"shot": ["usdShot"], "asset": ["usdAsset"]}.get(bootstrap) @@ -73,20 +73,20 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): self.log.debug("Checking required bootstrap: %s" % required) for product_name in required: if self._product_exists( - project_name, instance, product_name, asset_doc + project_name, instance, product_name, folder_entity ): continue self.log.debug( "Creating {0} USD bootstrap: {1} {2}".format( - bootstrap, 
asset_name, product_name + bootstrap, folder_path, product_name ) ) product_type = "usd.bootstrap" new = instance.context.create_instance(product_name) new.data["productName"] = product_name - new.data["label"] = "{0} ({1})".format(product_name, asset_name) + new.data["label"] = "{0} ({1})".format(product_name, folder_name) new.data["productType"] = product_type new.data["family"] = product_type new.data["comment"] = "Automated bootstrap USD file." @@ -100,24 +100,24 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin): new.data[key] = instance.data[key] def _product_exists( - self, project_name, instance, product_name, asset_doc + self, project_name, instance, product_name, folder_entity ): """Return whether product exists in current context or in database.""" # Allow it to be created during this publish session context = instance.context - asset_doc_name = get_asset_name_identifier(asset_doc) + folder_path = folder_entity["path"] for inst in context: if ( inst.data["productName"] == product_name - and inst.data["folderPath"] == asset_doc_name + and inst.data["folderPath"] == folder_path ): return True # Or, if they already exist in the database we can # skip them too. - if get_subset_by_name( - project_name, product_name, asset_doc["_id"], fields=["_id"] + if ayon_api.get_product_by_name( + project_name, product_name, folder_entity["id"], fields={"id"} ): return True return False diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py index f085b6ca41..93add6806e 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -64,4 +64,4 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.append((layer, save_path)) # Allow this product to be grouped into a USD Layer on creation - layer_inst.data["subsetGroup"] = "USD Layer" + layer_inst.data["productGroup"] = "USD Layer" diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py index f80ca39f1c..62b7dcdd5d 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_vray_rop.py @@ -67,7 +67,7 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin): beauty_product = self.get_render_product_name(default_prefix) render_products.append(beauty_product) files_by_aov = { - "RGB Color": self.generate_expected_files(instance, + "": self.generate_expected_files(instance, beauty_product)} if instance.data.get("RenderElement", True): @@ -75,7 +75,9 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin): if render_element: for aov, renderpass in render_element.items(): render_products.append(renderpass) - files_by_aov[aov] = self.generate_expected_files(instance, renderpass) # noqa + files_by_aov[aov] = self.generate_expected_files( + instance, renderpass) + for product in render_products: self.log.debug("Found render product: %s" % product) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py new file mode 100644 index 0000000000..aedcb1da02 --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_active_view_thumbnail.py @@ -0,0 +1,55 @@ +import pyblish.api +import tempfile +from ayon_core.pipeline import publish 
+from ayon_core.hosts.houdini.api import lib +from ayon_core.hosts.houdini.api.pipeline import IS_HEADLESS + + +class ExtractActiveViewThumbnail(publish.Extractor): + """Set instance thumbnail to a screengrab of current active viewport. + + If an instance does not have a thumbnail set yet, it will get a + thumbnail of the currently active view at the time of publishing + as a fallback. + + """ + order = pyblish.api.ExtractorOrder + 0.49 + label = "Extract Active View Thumbnail" + families = ["workfile"] + hosts = ["houdini"] + + def process(self, instance): + if IS_HEADLESS: + self.log.debug( + "Skip extraction of active view thumbnail, due to being in" + " headless mode." + ) + return + + thumbnail = instance.data.get("thumbnailPath") + if thumbnail: + # A thumbnail was already set for this instance + return + + view_thumbnail = self.get_view_thumbnail(instance) + if not view_thumbnail: + return + self.log.debug("Setting instance thumbnail path to: {}" + .format(view_thumbnail) + ) + instance.data["thumbnailPath"] = view_thumbnail + + def get_view_thumbnail(self, instance): + + sceneview = lib.get_scene_viewer() + if sceneview is None: + self.log.debug("Skipping Extract Active View Thumbnail" + " because no scene view was detected.") + return + + with tempfile.NamedTemporaryFile("w", suffix=".jpg", delete=False) as tmp: + lib.sceneview_snapshot(sceneview, tmp.name) + thumbnail_path = tmp.name + + instance.context.data["cleanupFullPaths"].append(thumbnail_path) + return thumbnail_path diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py index c6dfb4332d..0fab69ef4a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_composite.py @@ -7,7 +7,8 @@ from ayon_core.hosts.houdini.api.lib import render_rop, splitext import hou -class ExtractComposite(publish.Extractor): +class ExtractComposite(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): order = pyblish.api.ExtractorOrder label = "Extract Composite (Image Sequence)" @@ -45,8 +46,14 @@ class ExtractComposite(publish.Extractor): "frameEnd": instance.data["frameEndHandle"], } - from pprint import pformat - - self.log.info(pformat(representation)) + if ext.lower() == "exr": + # Inject colorspace with 'scene_linear' as that's the + # default Houdini working colorspace and all extracted + # OpenEXR images should be in that colorspace.
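+ # The mixin call below stamps the colorspace into the representation + # data so the information travels with the published files.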
+ # https://www.sidefx.com/docs/houdini/render/linear.html#image-formats + self.set_representation_colorspace( + representation, instance.context, + colorspace="scene_linear" + ) instance.data["representations"].append(representation) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py index fabdfd9a9d..57bb8b881a 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_opengl.py @@ -8,7 +8,8 @@ from ayon_core.hosts.houdini.api.lib import render_rop import hou -class ExtractOpenGL(publish.Extractor): +class ExtractOpenGL(publish.Extractor, + publish.ColormanagedPyblishPluginMixin): order = pyblish.api.ExtractorOrder - 0.01 label = "Extract OpenGL" @@ -46,6 +47,14 @@ class ExtractOpenGL(publish.Extractor): "camera_name": instance.data.get("review_camera") } + if ropnode.evalParm("colorcorrect") == 2: # OpenColorIO enabled + colorspace = ropnode.evalParm("ociocolorspace") + # inject colorspace data + self.set_representation_colorspace( + representation, instance.context, + colorspace=colorspace + ) + if "representations" not in instance.data: instance.data["representations"] = [] instance.data["representations"].append(representation) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py b/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py index 99c61803e6..2e5c9a892c 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -1,17 +1,12 @@ import os import contextlib -import hou import sys from collections import deque +import hou +import ayon_api import pyblish.api -from ayon_core.client import ( - get_asset_by_name, - get_subset_by_name, - get_last_version_by_subset_id, - get_representation_by_name, -) from ayon_core.pipeline import ( get_representation_path, publish, @@ -284,29 +279,29 @@ class ExtractUSDLayered(publish.Extractor): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset_doc = get_asset_by_name( - project_name, dependency.data["folderPath"], fields=["_id"] + folder_entity = ayon_api.get_folder_by_path( + project_name, dependency.data["folderPath"], fields={"id"} ) - subset_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, dependency.data["productName"], - asset_doc["_id"], - fields=["_id"] + folder_entity["id"], + fields={"id"} ) - if not subset_doc: + if not product_entity: # Subset doesn't exist yet. 
Definitely new file self.log.debug("No existing product..") return False - version_doc = get_last_version_by_subset_id( - project_name, subset_doc["_id"], fields=["_id"] + version_entity = ayon_api.get_last_version_by_product_id( + project_name, product_entity["id"], fields={"id"} ) - if not version_doc: + if not version_entity: self.log.debug("No existing version..") return False - representation = get_representation_by_name( - project_name, ext.lstrip("."), version_doc["_id"] + representation = ayon_api.get_representation_by_name( + project_name, ext.lstrip("."), version_entity["id"] ) if not representation: self.log.debug("No existing representation..") diff --git a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py index 73145b211a..fe8fa25f10 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/increment_current_file.py @@ -3,7 +3,6 @@ import pyblish.api from ayon_core.lib import version_up from ayon_core.pipeline import registered_host from ayon_core.pipeline.publish import get_errored_plugins_from_context -from ayon_core.hosts.houdini.api import HoudiniHost from ayon_core.pipeline.publish import KnownPublishError @@ -39,7 +38,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): ) # Filename must not have changed since collecting - host = registered_host() # type: HoudiniHost + host = registered_host() current_file = host.current_file() if context.data["currentFile"] != current_file: raise KnownPublishError( diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py index 95414ae7f1..fdf03d5cba 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -71,6 +71,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # the isinstance check above should be stricter than this category if output_node.type().category().name() != "Cop2": raise PublishValidationError( - ("Output node %s is not of category Cop2. " - "This is a bug...").format(output_node.path()), + ( + "Output node {} is not of category Cop2." + " This is a bug..." 
+ ).format(output_node.path()), title=cls.label)
diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_frame_range.py index 36e1b9b2a5..2a3418ee7e 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_frame_range.py @@ -7,8 +7,8 @@ from ayon_core.hosts.houdini.api.action import SelectInvalidAction import hou -class DisableUseAssetHandlesAction(RepairAction): - label = "Disable use asset handles" +class DisableUseFolderHandlesAction(RepairAction): + label = "Disable use folder handles" icon = "mdi.toggle-switch-off" @@ -23,7 +23,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder - 0.1 hosts = ["houdini"] label = "Validate Frame Range" - actions = [DisableUseAssetHandlesAction, SelectInvalidAction] + actions = [DisableUseFolderHandlesAction, SelectInvalidAction] def process(self, instance): @@ -41,11 +41,11 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): "## Invalid Frame Range\n" "The frame range for the instance is invalid because " "the start frame is higher than the end frame.\n\nThis " - "is likely due to asset handles being applied to your " + "is likely due to folder handles being applied to your " "instance or the ROP node's start frame " "is set higher than the end frame.\n\nIf your ROP frame " - "range is correct and you do not want to apply asset " - "handles make sure to disable Use asset handles on the " + "range is correct and you do not want to apply folder " + "handles, make sure to disable Use folder handles on the " "publish instance." ) ) @@ -71,7 +71,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): cls.log.info( "The ROP node render range is set to " "{0[frameStartHandle]} - {0[frameEndHandle]} " - "The asset handles applied to the instance are start handle " + "The folder handles applied to the instance are start handle " "{0[handleStart]} and end handle {0[handleEnd]}" .format(instance.data) ) @@ -84,7 +84,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): # Already fixed return - # Disable use asset handles + # Disable use folder handles context = instance.context create_context = context.data["create_context"] instance_id = instance.data.get("instance_id") @@ -102,5 +102,5 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): created_instance.publish_attributes["CollectAssetHandles"]["use_handles"] = False # noqa create_context.save_changes() - cls.log.debug("use asset handles is turned off for '{}'" + cls.log.debug("use folder handles is turned off for '{}'" .format(instance))
diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_instance_in_context.py new file mode 100644 index 0000000000..26708e306b --- /dev/null +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_instance_in_context.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +"""Validate if instance asset is the same as context asset.""" + +import pyblish.api +from ayon_core.hosts.houdini.api.action import SelectROPAction +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) + + +class ValidateInstanceInContextHoudini(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validator to check if the instance asset matches the context asset. + + When working in a per-shot style you always publish data in the context + of the current asset (shot). This validator checks if this is so. It is + optional so it can be disabled when needed. + """ + # Similar to maya-equivalent `ValidateInstanceInContext` + + order = ValidateContentsOrder + label = "Instance in same Context" + optional = True + hosts = ["houdini"] + actions = [SelectROPAction, RepairAction] + + def process(self, instance): + if not self.is_active(instance.data): + return + + folder_path = instance.data.get("folderPath") + task = instance.data.get("task") + context = self.get_context(instance) + if (folder_path, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folder_path, task) + + raise PublishValidationError( + message=( + "Instance '{}' publishes to a different asset than the " + "current context: {}. Current context: {}".format( + instance.name, instance_label, context_label + ) + ), + description=( + "## Publishing to a different asset\n" + "There are publish instances present which are publishing " + "into a different asset than your current context.\n\n" + "Usually this is not what you want but there can be cases " + "where you might want to publish into another asset or " + "shot. If that's the case, you can disable the validation " + "on the instance to ignore it." + ) + ) + + @classmethod + def repair(cls, instance): + context_folder, context_task = cls.get_context(instance) + + create_context = instance.context.data["create_context"] + instance_id = instance.data["instance_id"] + created_instance = create_context.get_instance_by_id( + instance_id + ) + created_instance["folderPath"] = context_folder + created_instance["task"] = context_task + create_context.save_changes() + + @staticmethod + def get_context(instance): + """Return folderPath, task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"]
diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py index 031138e21d..d3afa83b67 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_review_colorspace.py @@ -4,15 +4,19 @@ from ayon_core.pipeline import ( PublishValidationError, OptionalPyblishPluginMixin ) -from ayon_core.pipeline.publish import RepairAction +from ayon_core.pipeline.publish import ( + RepairAction, + get_plugin_settings, + apply_plugin_settings_automatically +) from ayon_core.hosts.houdini.api.action import SelectROPAction import os import hou -class SetDefaultViewSpaceAction(RepairAction): - label = "Set default view colorspace" +class ResetViewSpaceAction(RepairAction): + label = "Reset OCIO colorspace parm" icon = "mdi.monitor" @@ -27,9 +31,25 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, families = ["review"] hosts = ["houdini"] label = "Validate Review Colorspace" - actions = [SetDefaultViewSpaceAction, SelectROPAction] + actions = [ResetViewSpaceAction, SelectROPAction] optional = True + review_color_space = "" + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="houdini") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Add review color settings
+ color_settings = project_settings["houdini"]["imageio"]["workfile"] + if color_settings["enabled"]: + cls.review_color_space = color_settings.get("review_color_space") + def process(self, instance): @@ -52,39 +72,54 @@ class ValidateReviewColorspace(pyblish.api.InstancePlugin, " 'OpenColorIO'".format(rop_node.path()) ) - if rop_node.evalParm("ociocolorspace") not in \ hou.Color.ocio_spaces(): - + current_color_space = rop_node.evalParm("ociocolorspace") + if current_color_space not in hou.Color.ocio_spaces(): raise PublishValidationError( "Invalid value: Colorspace name doesn't exist.\n" "Check 'OCIO Colorspace' parameter on '{}' ROP" .format(rop_node.path()) ) - @classmethod - def repair(cls, instance): - """Set Default View Space Action. + # If houdini/imageio/workfile is enabled and the Review colorspace + # setting is empty, then this check should actually verify that + # current_color_space equals the default colorspace value. + # However, that would make the black cmd screen show up more often, + # which is very annoying. + if self.review_color_space and \ + self.review_color_space != current_color_space: - It is a helper action more than a repair action, - used to set colorspace on opengl node to the default view. - """ - from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa - - rop_node = hou.node(instance.data["instance_node"]) - - if rop_node.evalParm("colorcorrect") != 2: - rop_node.setParms({"colorcorrect": 2}) - cls.log.debug( - "'Color Correction' parm on '{}' has been set to" - " 'OpenColorIO'".format(rop_node.path()) + raise PublishValidationError( + "Invalid value: Colorspace name doesn't match" + " the colorspace specified in settings." ) - # Get default view colorspace name - default_view_space = get_default_display_view_colorspace() + @classmethod + def repair(cls, instance): + """Reset view colorspace. - rop_node.setParms({"ociocolorspace": default_view_space}) - cls.log.info( - "'OCIO Colorspace' parm on '{}' has been set to " - "the default view color space '{}'" .format(rop_node, default_view_space) - ) + It is used to set the colorspace on the opengl node. + + It uses the colorspace value specified in the Houdini addon settings. + If the value in the Houdini addon settings is empty, + it will fall back to the default colorspace. + + Note: + This repair action assumes that OCIO is enabled; if OCIO is + disabled, the whole validation is skipped and this repair + action won't show up. + """ + from ayon_core.hosts.houdini.api.lib import set_review_color_space + + # Fall back to the default value if cls.review_color_space is empty. + if not cls.review_color_space: + # cls.review_color_space is an empty string + # when the imageio/workfile setting is disabled or + # when the Review colorspace setting is empty.
+ from ayon_core.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa + cls.review_color_space = get_default_display_view_colorspace() + + rop_node = hou.node(instance.data["instance_node"]) + set_review_color_space(rop_node, + cls.review_color_space, + cls.log) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py index e94f09568d..0481929824 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_subset_name.py @@ -15,21 +15,21 @@ from ayon_core.pipeline.create import get_product_name import hou -class FixSubsetNameAction(RepairAction): - label = "Fix Subset Name" +class FixProductNameAction(RepairAction): + label = "Fix Product Name" class ValidateSubsetName(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): - """Validate Subset name. + """Validate Product name. """ families = ["staticMesh"] hosts = ["houdini"] - label = "Validate Subset Name" + label = "Validate Product Name" order = ValidateContentsOrder + 0.1 - actions = [FixSubsetNameAction, SelectInvalidAction] + actions = [FixProductNameAction, SelectInvalidAction] optional = True @@ -54,15 +54,20 @@ class ValidateSubsetName(pyblish.api.InstancePlugin, rop_node = hou.node(instance.data["instance_node"]) # Check product name - asset_doc = instance.data["assetEntity"] + folder_entity = instance.data["folderEntity"] + task_entity = instance.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] product_name = get_product_name( instance.context.data["projectName"], - asset_doc, - instance.data["task"], + task_name, + task_type, instance.context.data["hostName"], instance.data["productType"], variant=instance.data["variant"], - dynamic_data={"asset": asset_doc["name"]} + dynamic_data={"asset": folder_entity["name"]} ) if instance.data.get("productName") != product_name: @@ -79,15 +84,20 @@ class ValidateSubsetName(pyblish.api.InstancePlugin, rop_node = hou.node(instance.data["instance_node"]) # Check product name - asset_doc = instance.data["assetEntity"] + folder_entity = instance.data["folderEntity"] + task_entity = instance.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] product_name = get_product_name( instance.context.data["projectName"], - asset_doc, - instance.data["task"], + task_name, + task_type, instance.context.data["hostName"], instance.data["productType"], variant=instance.data["variant"], - dynamic_data={"asset": asset_doc["name"]} + dynamic_data={"asset": folder_entity["name"]} ) instance.data["productName"] = product_name diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py index 33d0d42383..ae00bc9db4 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py @@ -24,7 +24,7 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin, - UCX This validator also checks if product name is correct - - {static mesh prefix}_{Asset-Name}{Variant}. + - {static mesh prefix}_{FolderName}{Variant}. 
""" diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index 6d21b59a9c..048d675c00 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,11 +1,14 @@ # -*- coding: utf-8 -*- import re +import ayon_api import pyblish.api -from ayon_core.client import get_subset_by_name -from ayon_core.pipeline.publish import ValidateContentsOrder -from ayon_core.pipeline import PublishValidationError +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + KnownPublishError, + PublishValidationError, +) class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -18,7 +21,7 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): def process(self, instance): project_name = instance.context.data["projectName"] - asset_name = instance.data["folderPath"] + folder_path = instance.data["folderPath"] product_name = instance.data["productName"] # Assume shading variation starts after a dot separator @@ -27,16 +30,21 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): "^usdShade", "usdModel", shade_product_name ) - asset_doc = instance.data.get("assetEntity") - if not asset_doc: - raise RuntimeError("Asset document is not filled on instance.") + folder_entity = instance.data.get("folderEntity") + if not folder_entity: + raise KnownPublishError( + "Folder entity is not filled on instance." + ) - subset_doc = get_subset_by_name( - project_name, model_product_name, asset_doc["_id"], fields=["_id"] + product_entity = ayon_api.get_product_by_name( + project_name, + model_product_name, + folder_entity["id"], + fields={"id"} ) - if not subset_doc: + if not product_entity: raise PublishValidationError( ("USD Model product not found: " - "{} ({})").format(model_product_name, asset_name), + "{} ({})").format(model_product_name, folder_path), title=self.label ) diff --git a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index d85f20e3ce..2ea4b5d816 100644 --- a/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/client/ayon_core/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -53,7 +53,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): # There were some issues with the editable node not having the right # configured path. 
So for now let's assure that is correct to.from value = ( - 'avalon://`chs("../asset_name")`/' + 'avalon://`chs("../folder_path")`/' 'usdShade`chs("../model_variantname1")`.usd' ) rop_value = rop.parm("lopoutput").rawValue() diff --git a/client/ayon_core/hosts/houdini/startup/MainMenuCommon.xml b/client/ayon_core/hosts/houdini/startup/MainMenuCommon.xml index b2ea142cd5..b6e78cbdc8 100644 --- a/client/ayon_core/hosts/houdini/startup/MainMenuCommon.xml +++ b/client/ayon_core/hosts/houdini/startup/MainMenuCommon.xml @@ -1,15 +1,15 @@ - + - + diff --git a/client/ayon_core/hosts/max/api/lib.py b/client/ayon_core/hosts/max/api/lib.py index 05c3364e4a..02b099b3ff 100644 --- a/client/ayon_core/hosts/max/api/lib.py +++ b/client/ayon_core/hosts/max/api/lib.py @@ -6,10 +6,18 @@ import json from typing import Any, Dict, Union import six -from ayon_core.pipeline import get_current_project_name, colorspace +import ayon_api + +from ayon_core.pipeline import ( + get_current_project_name, + get_current_folder_path, + get_current_task_name, + colorspace +) from ayon_core.settings import get_project_settings from ayon_core.pipeline.context_tools import ( - get_current_project, get_current_project_asset) + get_current_task_entity +) from ayon_core.style import load_stylesheet from pymxs import runtime as rt @@ -215,49 +223,33 @@ def set_scene_resolution(width: int, height: int): def reset_scene_resolution(): """Apply the scene resolution from the project definition - scene resolution can be overwritten by an asset if the asset.data contains - any information regarding scene resolution . - Returns: - None + scene resolution can be overwritten by a folder if the folder.attrib + contains any information regarding scene resolution. """ - data = ["data.resolutionWidth", "data.resolutionHeight"] - project_resolution = get_current_project(fields=data) - project_resolution_data = project_resolution["data"] - asset_resolution = get_current_project_asset(fields=data) - asset_resolution_data = asset_resolution["data"] - # Set project resolution - project_width = int(project_resolution_data.get("resolutionWidth", 1920)) - project_height = int(project_resolution_data.get("resolutionHeight", 1080)) - width = int(asset_resolution_data.get("resolutionWidth", project_width)) - height = int(asset_resolution_data.get("resolutionHeight", project_height)) + task_attributes = get_current_task_entity(fields={"attrib"})["attrib"] + width = int(task_attributes["resolutionWidth"]) + height = int(task_attributes["resolutionHeight"]) set_scene_resolution(width, height) -def get_frame_range(asset_doc=None) -> Union[Dict[str, Any], None]: - """Get the current assets frame range and handles. +def get_frame_range(task_entity=None) -> Union[Dict[str, Any], None]: + """Get the current task frame range and handles Args: - asset_doc (dict): Asset Entity Data + task_entity (dict): Task Entity. Returns: dict: with frame start, frame end, handle start, handle end. 
""" # Set frame start/end - if asset_doc is None: - asset_doc = get_current_project_asset() - - data = asset_doc["data"] - frame_start = data.get("frameStart") - frame_end = data.get("frameEnd") - - if frame_start is None or frame_end is None: - return {} - - frame_start = int(frame_start) - frame_end = int(frame_end) - handle_start = int(data.get("handleStart", 0)) - handle_end = int(data.get("handleEnd", 0)) + if task_entity is None: + task_entity = get_current_task_entity(fields={"attrib"}) + task_attributes = task_entity["attrib"] + frame_start = int(task_attributes["frameStart"]) + frame_end = int(task_attributes["frameEnd"]) + handle_start = int(task_attributes["handleStart"]) + handle_end = int(task_attributes["handleEnd"]) frame_start_handle = frame_start - handle_start frame_end_handle = frame_end + handle_end @@ -272,7 +264,7 @@ def get_frame_range(asset_doc=None) -> Union[Dict[str, Any], None]: def reset_frame_range(fps: bool = True): - """Set frame range to current asset. + """Set frame range to current folder. This is part of 3dsmax documentation: animationRange: A System Global variable which lets you get and @@ -283,8 +275,9 @@ def reset_frame_range(fps: bool = True): scene frame rate in frames-per-second. """ if fps: - data_fps = get_current_project(fields=["data.fps"]) - fps_number = float(data_fps["data"]["fps"]) + task_entity = get_current_task_entity() + task_attributes = task_entity["attrib"] + fps_number = float(task_attributes["fps"]) rt.frameRate = fps_number frame_range = get_frame_range() @@ -328,7 +321,7 @@ def convert_unit_scale(): def set_context_setting(): """Apply the project settings from the project definition - Settings can be overwritten by an asset if the asset.data contains + Settings can be overwritten by an folder if the folder.attrib contains any information regarding those settings. 
Examples of settings: diff --git a/client/ayon_core/hosts/max/api/lib_rendersettings.py b/client/ayon_core/hosts/max/api/lib_rendersettings.py index 7ffc024ba3..35b6d064c1 100644 --- a/client/ayon_core/hosts/max/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/max/api/lib_rendersettings.py @@ -3,7 +3,7 @@ from pymxs import runtime as rt from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.max.api.lib import ( set_render_frame_range, @@ -57,14 +57,14 @@ class RenderSettings(object): if not os.path.exists(output_dir): os.makedirs(output_dir) # hard-coded, should be customized in the setting - context = get_current_project_asset() + folder_attributes = get_current_folder_entity()["attrib"] # get project resolution - width = context["data"].get("resolutionWidth") - height = context["data"].get("resolutionHeight") + width = folder_attributes.get("resolutionWidth") + height = folder_attributes.get("resolutionHeight") # Set Frame Range - frame_start = context["data"].get("frame_start") - frame_end = context["data"].get("frame_end") + frame_start = folder_attributes.get("frame_start") + frame_end = folder_attributes.get("frame_end") set_render_frame_range(frame_start, frame_end) # get the production render renderer_class = get_current_renderer() diff --git a/client/ayon_core/hosts/max/api/menu.py b/client/ayon_core/hosts/max/api/menu.py index d968874a7e..c6ceeb3a43 100644 --- a/client/ayon_core/hosts/max/api/menu.py +++ b/client/ayon_core/hosts/max/api/menu.py @@ -8,8 +8,8 @@ from ayon_core.tools.utils import host_tools from ayon_core.hosts.max.api import lib -class OpenPypeMenu(object): - """Object representing OpenPype/AYON menu. +class AYONMenu(object): + """Object representing AYON menu. This is using "hack" to inject itself before "Help" menu of 3dsmax. For some reason `postLoadingMenus` event doesn't fire, and main menu @@ -39,7 +39,7 @@ class OpenPypeMenu(object): self._counter = 0 self._timer.stop() - self.build_openpype_menu() + self._build_ayon_menu() @staticmethod def get_main_widget(): @@ -50,8 +50,8 @@ class OpenPypeMenu(object): """Get main Menubar by 3dsmax main window.""" return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] - def get_or_create_openpype_menu( - self, name: str = "&Openpype", + def _get_or_create_ayon_menu( + self, name: str = "&AYON", before: str = "&Help") -> QtWidgets.QAction: """Create AYON menu. 
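Editor's note: the `_get_or_create_ayon_menu` hunk above implements the "inject before Help" behavior described in the `AYONMenu` docstring. A rough standalone sketch of that pattern with plain Qt follows; the function name and the append-at-end fallback are illustrative assumptions, not the addon's actual code:

    from qtpy import QtWidgets

    def insert_menu_before(menu_bar, name="&AYON", before="&Help"):
        """Return the named top-level menu, creating it before `before`."""
        for action in menu_bar.actions():
            menu = action.menu()
            if menu is None:
                continue
            if name in menu.title():
                # The menu already exists, reuse it.
                return menu
            if before in menu.title():
                new_menu = QtWidgets.QMenu(name, menu_bar)
                # insertMenu() places the new menu before this action.
                menu_bar.insertMenu(action, new_menu)
                return new_menu
        # Assumed fallback: append at the end if `before` was not found.
        return menu_bar.addMenu(name)
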
@@ -73,7 +73,7 @@ class OpenPypeMenu(object): help_action = None for item in menu_items: if name in item.title(): - # we already have OpenPype menu + # we already have AYON menu return item if before in item.title(): @@ -85,50 +85,50 @@ class OpenPypeMenu(object): self.menu = op_menu return op_menu - def build_openpype_menu(self) -> QtWidgets.QAction: + def _build_ayon_menu(self) -> QtWidgets.QAction: """Build items in AYON menu.""" - openpype_menu = self.get_or_create_openpype_menu() - load_action = QtWidgets.QAction("Load...", openpype_menu) + ayon_menu = self._get_or_create_ayon_menu() + load_action = QtWidgets.QAction("Load...", ayon_menu) load_action.triggered.connect(self.load_callback) - openpype_menu.addAction(load_action) + ayon_menu.addAction(load_action) - publish_action = QtWidgets.QAction("Publish...", openpype_menu) + publish_action = QtWidgets.QAction("Publish...", ayon_menu) publish_action.triggered.connect(self.publish_callback) - openpype_menu.addAction(publish_action) + ayon_menu.addAction(publish_action) - manage_action = QtWidgets.QAction("Manage...", openpype_menu) + manage_action = QtWidgets.QAction("Manage...", ayon_menu) manage_action.triggered.connect(self.manage_callback) - openpype_menu.addAction(manage_action) + ayon_menu.addAction(manage_action) - library_action = QtWidgets.QAction("Library...", openpype_menu) + library_action = QtWidgets.QAction("Library...", ayon_menu) library_action.triggered.connect(self.library_callback) - openpype_menu.addAction(library_action) + ayon_menu.addAction(library_action) - openpype_menu.addSeparator() + ayon_menu.addSeparator() - workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) + workfiles_action = QtWidgets.QAction("Work Files...", ayon_menu) workfiles_action.triggered.connect(self.workfiles_callback) - openpype_menu.addAction(workfiles_action) + ayon_menu.addAction(workfiles_action) - openpype_menu.addSeparator() + ayon_menu.addSeparator() - res_action = QtWidgets.QAction("Set Resolution", openpype_menu) + res_action = QtWidgets.QAction("Set Resolution", ayon_menu) res_action.triggered.connect(self.resolution_callback) - openpype_menu.addAction(res_action) + ayon_menu.addAction(res_action) - frame_action = QtWidgets.QAction("Set Frame Range", openpype_menu) + frame_action = QtWidgets.QAction("Set Frame Range", ayon_menu) frame_action.triggered.connect(self.frame_range_callback) - openpype_menu.addAction(frame_action) + ayon_menu.addAction(frame_action) - colorspace_action = QtWidgets.QAction("Set Colorspace", openpype_menu) + colorspace_action = QtWidgets.QAction("Set Colorspace", ayon_menu) colorspace_action.triggered.connect(self.colorspace_callback) - openpype_menu.addAction(colorspace_action) + ayon_menu.addAction(colorspace_action) - unit_scale_action = QtWidgets.QAction("Set Unit Scale", openpype_menu) + unit_scale_action = QtWidgets.QAction("Set Unit Scale", ayon_menu) unit_scale_action.triggered.connect(self.unit_scale_callback) - openpype_menu.addAction(unit_scale_action) + ayon_menu.addAction(unit_scale_action) - return openpype_menu + return ayon_menu def load_callback(self): """Callback to show Loader tool.""" diff --git a/client/ayon_core/hosts/max/api/pipeline.py b/client/ayon_core/hosts/max/api/pipeline.py index 1486f7218d..675f36c24f 100644 --- a/client/ayon_core/hosts/max/api/pipeline.py +++ b/client/ayon_core/hosts/max/api/pipeline.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""Pipeline tools for OpenPype Houdini integration.""" +"""Pipeline tools for AYON 3ds max integration.""" import 
os import logging from operator import attrgetter @@ -14,7 +14,7 @@ from ayon_core.pipeline import ( AVALON_CONTAINER_ID, AYON_CONTAINER_ID, ) -from ayon_core.hosts.max.api.menu import OpenPypeMenu +from ayon_core.hosts.max.api.menu import AYONMenu from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.plugin import MS_CUSTOM_ATTRIB from ayon_core.hosts.max import MAX_HOST_DIR @@ -48,7 +48,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): register_creator_plugin_path(CREATE_PATH) # self._register_callbacks() - self.menu = OpenPypeMenu() + self.menu = AYONMenu() self._has_been_setup = True @@ -94,7 +94,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): def _deferred_menu_creation(self): self.log.info("Building menu ...") - self.menu = OpenPypeMenu() + self.menu = AYONMenu() @staticmethod def create_context_node(): @@ -148,7 +148,7 @@ attributes "OpenPypeContext" def ls() -> list: - """Get all OpenPype instances.""" + """Get all AYON containers.""" objs = rt.objects containers = [ obj for obj in objs @@ -169,7 +169,7 @@ def containerise(name: str, nodes: list, context, "name": name, "namespace": namespace or "", "loader": loader, - "representation": context["representation"]["_id"], + "representation": context["representation"]["id"], } container_name = f"{namespace}:{name}{suffix}" container = rt.container(name=container_name) @@ -240,10 +240,10 @@ def get_previous_loaded_object(container: str): node_list(list): list of nodes which are previously loaded """ node_list = [] - sel_list = rt.getProperty(container.modifiers[0].openPypeData, "sel_list") - for obj in rt.Objects: - if str(obj) in sel_list: - node_list.append(obj) + node_transform_monitor_list = rt.getProperty( + container.modifiers[0].openPypeData, "all_handles") + for node_transform_monitor in node_transform_monitor_list: + node_list.append(node_transform_monitor.node) return node_list diff --git a/client/ayon_core/hosts/max/api/plugin.py b/client/ayon_core/hosts/max/api/plugin.py index 4d5d18a42d..e5d12ce87d 100644 --- a/client/ayon_core/hosts/max/api/plugin.py +++ b/client/ayon_core/hosts/max/api/plugin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""3dsmax specific Avalon/Pyblish plugin definitions.""" +"""3dsmax specific AYON/Pyblish plugin definitions.""" from abc import ABCMeta import six @@ -156,10 +156,6 @@ MS_CUSTOM_ATTRIB = """attributes "openPypeData" )""" -class OpenPypeCreatorError(CreatorError): - pass - - class MaxCreatorBase(object): @staticmethod diff --git a/client/ayon_core/hosts/max/api/preview_animation.py b/client/ayon_core/hosts/max/api/preview_animation.py index f715efa53d..399d3b6222 100644 --- a/client/ayon_core/hosts/max/api/preview_animation.py +++ b/client/ayon_core/hosts/max/api/preview_animation.py @@ -31,23 +31,26 @@ def viewport_layout_and_camera(camera, layout="layout_1"): layout (str): layout to use in viewport, defaults to `layout_1` Use None to not change viewport layout during context. 
""" + needs_maximise = 0 + # Set to first active non extended viewport + rt.viewport.activeViewportEx(1) original_camera = rt.viewport.getCamera() - original_layout = rt.viewport.getLayout() - if not original_camera: - # if there is no original camera - # use the current camera as original - original_camera = rt.getNodeByName(camera) + original_type = rt.viewport.getType() review_camera = rt.getNodeByName(camera) + try: - if layout is not None: - layout = rt.Name(layout) - if rt.viewport.getLayout() != layout: - rt.viewport.setLayout(layout) + if rt.viewport.getLayout() != rt.name(layout): + rt.execute("max tool maximize") + needs_maximise = 1 rt.viewport.setCamera(review_camera) yield finally: - rt.viewport.setLayout(original_layout) - rt.viewport.setCamera(original_camera) + if needs_maximise == 1: + rt.execute("max tool maximize") + if original_type == rt.Name("view_camera"): + rt.viewport.setCamera(original_camera) + else: + rt.viewport.setType(original_type) @contextlib.contextmanager diff --git a/client/ayon_core/hosts/max/hooks/force_startup_script.py b/client/ayon_core/hosts/max/hooks/force_startup_script.py index 659be7dfc6..417f0049ab 100644 --- a/client/ayon_core/hosts/max/hooks/force_startup_script.py +++ b/client/ayon_core/hosts/max/hooks/force_startup_script.py @@ -2,11 +2,11 @@ """Pre-launch to force 3ds max startup script.""" import os from ayon_core.hosts.max import MAX_HOST_DIR -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class ForceStartupScript(PreLaunchHook): - """Inject OpenPype environment to 3ds max. + """Inject AYON environment to 3ds max. Note that this works in combination whit 3dsmax startup script that is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH diff --git a/client/ayon_core/hosts/max/hooks/inject_python.py b/client/ayon_core/hosts/max/hooks/inject_python.py index 36d53551ba..fc9626ab87 100644 --- a/client/ayon_core/hosts/max/hooks/inject_python.py +++ b/client/ayon_core/hosts/max/hooks/inject_python.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- """Pre-launch hook to inject python environment.""" import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class InjectPythonPath(PreLaunchHook): - """Inject OpenPype environment to 3dsmax. + """Inject AYON environment to 3dsmax. 
Note that this works in combination whit 3dsmax startup script that is translating it back to PYTHONPATH for cases when 3dsmax drops PYTHONPATH diff --git a/client/ayon_core/hosts/max/hooks/set_paths.py b/client/ayon_core/hosts/max/hooks/set_paths.py index 0ee1b0dab7..f066de092e 100644 --- a/client/ayon_core/hosts/max/hooks/set_paths.py +++ b/client/ayon_core/hosts/max/hooks/set_paths.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class SetPath(PreLaunchHook): diff --git a/client/ayon_core/hosts/max/plugins/create/create_workfile.py b/client/ayon_core/hosts/max/plugins/create/create_workfile.py index 1552149413..901da6254c 100644 --- a/client/ayon_core/hosts/max/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/max/plugins/create/create_workfile.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" +import ayon_api + from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_core.client import get_asset_by_name, get_asset_name_identifier from ayon_core.hosts.max.api import plugin from ayon_core.hosts.max.api.lib import read, imprint from pymxs import runtime as rt @@ -24,21 +25,26 @@ class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator): if instance.creator_identifier == self.identifier ), None) project_name = self.project_name - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name if current_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": variant } @@ -46,8 +52,8 @@ class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator): data.update( self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, current_instance) @@ -61,21 +67,25 @@ class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator): self._add_instance_to_context(current_instance) imprint(instance_node.name, current_instance.data) elif ( - current_instance["folderPath"] != asset_name + current_instance["folderPath"] != folder_path or current_instance["task"] != task_name ): # Update instance context if is not the same - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) - asset_name = get_asset_name_identifier(asset_doc) - current_instance["folderPath"] = asset_name + current_instance["folderPath"] = folder_entity["path"] current_instance["task"] = task_name current_instance["productName"] = product_name @@ -98,21 +108,6 @@ class CreateWorkfile(plugin.MaxCreatorBase, AutoCreator): created_inst.data_to_store() ) - def remove_instances(self, instances): - """Remove specified instance from the scene. 
- - This is only removing `id` parameter so instance is no longer - instance, because it might contain valuable data for artist. - - """ - for instance in instances: - instance_node = rt.GetNodeByName( - instance.data.get("instance_node")) - if instance_node: - rt.Delete(instance_node) - - self._remove_instance_from_context(instance) - def create_node(self, product_name): if rt.getNodeByName(product_name): node = rt.getNodeByName(product_name) diff --git a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py index 8387d7a837..6f1e9988c5 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_camera_fbx.py @@ -18,8 +18,8 @@ from ayon_core.pipeline import get_representation_path, load class FbxLoader(load.LoaderPlugin): """Fbx Loader.""" - families = ["camera"] - representations = ["fbx"] + product_types = {"camera"} + representations = {"fbx"} order = -9 icon = "code-fork" color = "white" @@ -51,10 +51,11 @@ class FbxLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node_name = container["instance_node"] node = rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -87,11 +88,11 @@ class FbxLoader(load.LoaderPlugin): update_custom_attribute_data(node, fbx_objects) lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py index ead77cd2f2..4f982dd5ba 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_max_scene.py +++ b/client/ayon_core/hosts/max/plugins/load/load_max_scene.py @@ -1,10 +1,12 @@ import os - +from qtpy import QtWidgets, QtCore +from ayon_core.lib.attribute_definitions import EnumDef from ayon_core.hosts.max.api import lib from ayon_core.hosts.max.api.lib import ( unique_namespace, get_namespace, - object_transform_set + object_transform_set, + is_headless ) from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, @@ -14,25 +16,97 @@ from ayon_core.hosts.max.api.pipeline import ( from ayon_core.pipeline import get_representation_path, load +class MaterialDupOptionsWindow(QtWidgets.QDialog): + """The pop-up dialog allows users to choose material + duplicate options for importing Max objects when updating + or switching assets. 
+ """ + def __init__(self, material_options): + super(MaterialDupOptionsWindow, self).__init__() + self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) + + self.material_option = None + self.material_options = material_options + + self.widgets = { + "label": QtWidgets.QLabel( + "Select material duplicate options before loading the max scene."), + "material_options_list": QtWidgets.QListWidget(), + "warning": QtWidgets.QLabel("No material options selected!"), + "buttons": QtWidgets.QWidget(), + "okButton": QtWidgets.QPushButton("Ok"), + "cancelButton": QtWidgets.QPushButton("Cancel") + } + for key, value in material_options.items(): + item = QtWidgets.QListWidgetItem(value) + self.widgets["material_options_list"].addItem(item) + item.setData(QtCore.Qt.UserRole, key) + # Build buttons. + layout = QtWidgets.QHBoxLayout(self.widgets["buttons"]) + layout.addWidget(self.widgets["okButton"]) + layout.addWidget(self.widgets["cancelButton"]) + # Build layout. + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(self.widgets["label"]) + layout.addWidget(self.widgets["material_options_list"]) + layout.addWidget(self.widgets["buttons"]) + + self.widgets["okButton"].pressed.connect(self.on_ok_pressed) + self.widgets["cancelButton"].pressed.connect(self.on_cancel_pressed) + self.widgets["material_options_list"].itemPressed.connect( + self.on_material_options_pressed) + + def on_material_options_pressed(self, item): + self.material_option = item.data(QtCore.Qt.UserRole) + + def on_ok_pressed(self): + if self.material_option is None: + self.widgets["warning"].setVisible(True) + return + self.close() + + def on_cancel_pressed(self): + self.material_option = "promptMtlDups" + self.close() + class MaxSceneLoader(load.LoaderPlugin): """Max Scene Loader.""" - families = ["camera", - "maxScene", - "model"] + product_types = { + "camera", + "maxScene", + "model", + } - representations = ["max"] + representations = {"max"} order = -8 icon = "code-fork" color = "green" + mtl_dup_default = "promptMtlDups" + mtl_dup_enum_dict = { + "promptMtlDups": "Prompt on Duplicate Materials", + "useMergedMtlDups": "Use Incoming Material", + "useSceneMtlDups": "Use Scene Material", + "renameMtlDups": "Merge and Rename Incoming Material" + } + @classmethod + def get_options(cls, contexts): + return [ + EnumDef("mtldup", + items=cls.mtl_dup_enum_dict, + default=cls.mtl_dup_default, + label="Material Duplicate Options") + ] - def load(self, context, name=None, namespace=None, data=None): + def load(self, context, name=None, namespace=None, options=None): from pymxs import runtime as rt + mat_dup_options = options.get("mtldup", self.mtl_dup_default) path = self.filepath_from_context(context) path = os.path.normpath(path) # import the max scene by using "merge file" path = path.replace('\\', '/') - rt.MergeMaxFile(path, quiet=True, includeFullGroup=True) + rt.MergeMaxFile(path, rt.Name(mat_dup_options), + quiet=True, includeFullGroup=True) max_objects = rt.getLastMergedNodes() max_object_names = [obj.name for obj in max_objects] # implement the OP/AYON custom attributes before load @@ -48,10 +122,11 @@ class MaxSceneLoader(load.LoaderPlugin): name, max_container, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node_name = container["instance_node"] node 
= rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -66,7 +141,12 @@ class MaxSceneLoader(load.LoaderPlugin): for prev_max_obj in prev_max_objects: if rt.isValidNode(prev_max_obj): # noqa rt.Delete(prev_max_obj) - rt.MergeMaxFile(path, quiet=True) + material_option = self.mtl_dup_default + if not is_headless(): + window = MaterialDupOptionsWindow(self.mtl_dup_enum_dict) + window.exec_() + material_option = window.material_option + rt.MergeMaxFile(path, rt.Name(material_option), quiet=True) current_max_objects = rt.getLastMergedNodes() @@ -86,11 +166,11 @@ class MaxSceneLoader(load.LoaderPlugin): update_custom_attribute_data(node, max_objects) lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_model.py b/client/ayon_core/hosts/max/plugins/load/load_model.py index cf35e107c2..1070fce2bd 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model.py @@ -14,9 +14,9 @@ from ayon_core.hosts.max.api.lib import ( class ModelAbcLoader(load.LoaderPlugin): """Loading model with the Alembic loader.""" - families = ["model"] + product_types = {"model"} label = "Load Model with Alembic" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" @@ -70,10 +70,11 @@ class ModelAbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node = rt.GetNodeByName(container["instance_node"]) node_list = [n for n in get_previous_loaded_object(node) if rt.ClassOf(n) == rt.AlembicContainer] @@ -90,11 +91,11 @@ class ModelAbcLoader(load.LoaderPlugin): abc_obj.source = path lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": repre_entity["id"]}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py index c0bacca33a..82cad71c3e 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_fbx.py @@ -17,8 +17,8 @@ from ayon_core.hosts.max.api.lib import maintained_selection class FbxModelLoader(load.LoaderPlugin): """Fbx Model Loader.""" - families = ["model"] - representations = ["fbx"] + product_types = {"model"} + representations = {"fbx"} order = -9 icon = "code-fork" color = "white" @@ -47,10 +47,11 @@ class FbxModelLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = 
get_representation_path(repre_entity) node_name = container["instance_node"] node = rt.getNodeByName(node_name) if not node: @@ -85,11 +86,11 @@ class FbxModelLoader(load.LoaderPlugin): rt.Select(node) update_custom_attribute_data(node, fbx_objects) lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py index 1023b67f0c..38f2cdf43c 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_obj.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_obj.py @@ -7,7 +7,6 @@ from ayon_core.hosts.max.api.lib import ( maintained_selection, object_transform_set ) -from ayon_core.hosts.max.api.lib import maintained_selection from ayon_core.hosts.max.api.pipeline import ( containerise, get_previous_loaded_object, @@ -20,8 +19,8 @@ from ayon_core.pipeline import get_representation_path, load class ObjLoader(load.LoaderPlugin): """Obj Loader.""" - families = ["model"] - representations = ["obj"] + product_types = {"model"} + representations = {"obj"} order = -9 icon = "code-fork" color = "white" @@ -47,10 +46,11 @@ class ObjLoader(load.LoaderPlugin): name, selections, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node_name = container["instance_node"] node = rt.getNodeByName(node_name) namespace, _ = get_namespace(node_name) @@ -77,11 +77,11 @@ class ObjLoader(load.LoaderPlugin): rt.Select(node) lib.imprint(node_name, { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py index 0ec6e5e8e7..2b946eb2aa 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_model_usd.py +++ b/client/ayon_core/hosts/max/plugins/load/load_model_usd.py @@ -22,9 +22,9 @@ from ayon_core.pipeline import get_representation_path, load class ModelUSDLoader(load.LoaderPlugin): """Loading model with the USD loader.""" - families = ["model"] + product_types = {"model"} label = "Load Model(USD)" - representations = ["usda"] + representations = {"usda"} order = -10 icon = "code-fork" color = "orange" @@ -65,8 +65,9 @@ class ModelUSDLoader(load.LoaderPlugin): name, usd_objects, context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node_name = container["instance_node"] node = rt.GetNodeByName(node_name) namespace, name = get_namespace(node_name) @@ -107,11 +108,11 @@ class ModelUSDLoader(load.LoaderPlugin): rt.Select(node) 
lib.imprint(node_name, { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py index e9cde4c654..0743b3bb34 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache.py @@ -18,9 +18,9 @@ from ayon_core.hosts.max.api.pipeline import ( class AbcLoader(load.LoaderPlugin): """Alembic loader.""" - families = ["camera", "animation", "pointcache"] + product_types = {"camera", "animation", "pointcache"} label = "Load Alembic" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" @@ -76,10 +76,11 @@ class AbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node = rt.GetNodeByName(container["instance_node"]) abc_container = [n for n in get_previous_loaded_object(node) if rt.ClassOf(n) == rt.AlembicContainer] @@ -96,11 +97,11 @@ class AbcLoader(load.LoaderPlugin): abc_obj.source = path lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": repre_entity["id"]}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py index 338cbfafb9..2efb7c7f62 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcache_ornatrix.py @@ -21,9 +21,9 @@ from pymxs import runtime as rt class OxAbcLoader(load.LoaderPlugin): """Ornatrix Alembic loader.""" - families = ["camera", "animation", "pointcache"] + product_types = {"camera", "animation", "pointcache"} label = "Load Alembic with Ornatrix" - representations = ["abc"] + representations = {"abc"} order = -10 icon = "code-fork" color = "orange" @@ -62,8 +62,9 @@ class OxAbcLoader(load.LoaderPlugin): namespace, loader=self.__class__.__name__ ) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node_name = container["instance_node"] namespace, name = get_namespace(node_name) node = rt.getNodeByName(node_name) @@ -98,11 +99,11 @@ class OxAbcLoader(load.LoaderPlugin): update_custom_attribute_data(node, ox_abc_objects) lib.imprint( container["instance_node"], - {"representation": str(representation["_id"])}, + {"representation": repre_entity["id"]}, ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt 
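Editor's note: the Max loader changes in this patch all follow the same migration: `update(self, container, representation)` becomes `update(self, container, context)`, the representation entity is read from `context["representation"]`, and the imprinted value drops the Mongo-style `str(representation["_id"])` in favor of the server's plain `repre_entity["id"]` string. A minimal sketch of the new shape (this is illustrative only, not a real plugin; comments stand in for the host-specific body):

    from ayon_core.pipeline import get_representation_path, load


    class ExampleLoader(load.LoaderPlugin):
        """Illustrative loader using the updated AYON signatures."""

        product_types = {"model"}   # previously: families = ["model"]
        representations = {"abc"}   # previously a list, now a set

        def update(self, container, context):
            # The representation entity now comes from the full context.
            repre_entity = context["representation"]
            path = get_representation_path(repre_entity)
            # ... host-specific update of the loaded nodes from `path` ...
            # The real plugins imprint the plain string id on the container:
            container["representation"] = repre_entity["id"]

        def switch(self, container, context):
            self.update(container, context)
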
diff --git a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py index 7f4fba50b3..0e79882fc5 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py +++ b/client/ayon_core/hosts/max/plugins/load/load_pointcloud.py @@ -17,8 +17,8 @@ from ayon_core.pipeline import get_representation_path, load class PointCloudLoader(load.LoaderPlugin): """Point Cloud Loader.""" - families = ["pointcloud"] - representations = ["prt"] + product_types = {"pointcloud"} + representations = {"prt"} order = -8 icon = "code-fork" color = "green" @@ -41,11 +41,12 @@ class PointCloudLoader(load.LoaderPlugin): name, [obj], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): """update the container""" from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node = rt.GetNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) update_custom_attribute_data( @@ -55,11 +56,11 @@ class PointCloudLoader(load.LoaderPlugin): for prt in rt.Selection: prt.filename = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): """remove the container""" diff --git a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py index 5f2f5ec1ad..22d42390d9 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/max/plugins/load/load_redshift_proxy.py @@ -23,8 +23,8 @@ class RedshiftProxyLoader(load.LoaderPlugin): """Load rs files with Redshift Proxy""" label = "Load Redshift Proxy" - families = ["redshiftproxy"] - representations = ["rs"] + product_types = {"redshiftproxy"} + representations = {"rs"} order = -9 icon = "code-fork" color = "white" @@ -52,10 +52,11 @@ class RedshiftProxyLoader(load.LoaderPlugin): name, [rs_proxy], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node = rt.getNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) rt.Select(node_list) @@ -65,11 +66,11 @@ class RedshiftProxyLoader(load.LoaderPlugin): proxy.file = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from pymxs import runtime as rt diff --git a/client/ayon_core/hosts/max/plugins/load/load_tycache.py b/client/ayon_core/hosts/max/plugins/load/load_tycache.py index 7ae1aea72c..7a5296d933 100644 --- a/client/ayon_core/hosts/max/plugins/load/load_tycache.py +++ b/client/ayon_core/hosts/max/plugins/load/load_tycache.py @@ -16,8 +16,8 @@ from ayon_core.pipeline import get_representation_path, load class 
TyCacheLoader(load.LoaderPlugin): """TyCache Loader.""" - families = ["tycache"] - representations = ["tyc"] + product_types = {"tycache"} + representations = {"tyc"} order = -8 icon = "code-fork" color = "green" @@ -39,11 +39,12 @@ class TyCacheLoader(load.LoaderPlugin): name, [obj], context, namespace, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): """update the container""" from pymxs import runtime as rt - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) node = rt.GetNodeByName(container["instance_node"]) node_list = get_previous_loaded_object(node) update_custom_attribute_data(node, node_list) @@ -51,11 +52,11 @@ class TyCacheLoader(load.LoaderPlugin): for tyc in node_list: tyc.filename = path lib.imprint(container["instance_node"], { - "representation": str(representation["_id"]) + "representation": repre_entity["id"] }) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): """remove the container""" diff --git a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py index 67b5174200..67cec23ecc 100644 --- a/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py +++ b/client/ayon_core/hosts/max/plugins/publish/extract_alembic.py @@ -53,6 +53,7 @@ class ExtractAlembic(publish.Extractor, hosts = ["max"] families = ["pointcache"] optional = True + active = True def process(self, instance): if not self.is_active(instance.data): @@ -102,24 +103,27 @@ class ExtractAlembic(publish.Extractor, @classmethod def get_attribute_defs(cls): - return [ + defs = super(ExtractAlembic, cls).get_attribute_defs() + defs.extend([ BoolDef("custom_attrs", label="Custom Attributes", default=False), - ] + ]) + return defs class ExtractCameraAlembic(ExtractAlembic): """Extract Camera with AlembicExport.""" - label = "Extract Alembic Camera" families = ["camera"] + optional = True -class ExtractModel(ExtractAlembic): +class ExtractModelAlembic(ExtractAlembic): """Extract Geometry in Alembic Format""" label = "Extract Geometry (Alembic)" families = ["model"] + optional = True def _set_abc_attributes(self, instance): attr_values = self.get_attr_values_from_data(instance.data) diff --git a/client/ayon_core/hosts/max/plugins/publish/help/validate_model_name.xml b/client/ayon_core/hosts/max/plugins/publish/help/validate_model_name.xml new file mode 100644 index 0000000000..e41146910a --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/help/validate_model_name.xml @@ -0,0 +1,26 @@ + + + +Invalid Model Name +## Nodes found with Invalid Model Name + +Nodes were detected in your scene whose model names do not +match the validation regex preset in the AYON settings. +### How to repair? +Make sure the model name matches the validation regex in your AYON settings. + + + +### Invalid nodes + +{nodes} + + +### How could this happen? + +This often happens when a mesh's model name does not match +the regex in the settings.
+ + + + \ No newline at end of file diff --git a/client/ayon_core/hosts/max/plugins/publish/increment_workfile_version.py b/client/ayon_core/hosts/max/plugins/publish/increment_workfile_version.py index 5f319966fe..c7c3f49626 100644 --- a/client/ayon_core/hosts/max/plugins/publish/increment_workfile_version.py +++ b/client/ayon_core/hosts/max/plugins/publish/increment_workfile_version.py @@ -9,7 +9,7 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin): order = pyblish.api.IntegratorOrder + 0.9 label = "Increment Workfile Version" hosts = ["max"] - families = ["workfile"] + families = ["maxrender", "workfile"] def process(self, context): path = context.data["currentFile"] diff --git a/client/ayon_core/hosts/max/plugins/publish/save_scene.py b/client/ayon_core/hosts/max/plugins/publish/save_scene.py index 1c59335ceb..fe2c7f50f4 100644 --- a/client/ayon_core/hosts/max/plugins/publish/save_scene.py +++ b/client/ayon_core/hosts/max/plugins/publish/save_scene.py @@ -2,7 +2,7 @@ import pyblish.api from ayon_core.pipeline import registered_host -class SaveCurrentScene(pyblish.api.ContextPlugin): +class SaveCurrentScene(pyblish.api.InstancePlugin): """Save current scene""" label = "Save current file" @@ -10,13 +10,15 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): hosts = ["max"] families = ["maxrender", "workfile"] - def process(self, context): + def process(self, instance): host = registered_host() current_file = host.get_current_workfile() - assert context.data["currentFile"] == current_file + assert instance.context.data["currentFile"] == current_file + if instance.data["productType"] == "maxrender": + host.save_workfile(current_file) - if host.workfile_has_unsaved_changes(): + elif host.workfile_has_unsaved_changes(): self.log.info(f"Saving current file: {current_file}") host.save_workfile(current_file) else: diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py b/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py index 0473fd4a8a..334e7dcec9 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_camera_contents.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api + from ayon_core.pipeline import PublishValidationError -from pymxs import runtime as rt class ValidateCameraContent(pyblish.api.InstancePlugin): diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_deadline_publish.py b/client/ayon_core/hosts/max/plugins/publish/validate_deadline_publish.py deleted file mode 100644 index 2c9ca4ae64..0000000000 --- a/client/ayon_core/hosts/max/plugins/publish/validate_deadline_publish.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import pyblish.api -from pymxs import runtime as rt -from ayon_core.pipeline.publish import ( - RepairAction, - ValidateContentsOrder, - PublishValidationError, - OptionalPyblishPluginMixin -) -from ayon_core.hosts.max.api.lib_rendersettings import RenderSettings - - -class ValidateDeadlinePublish(pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin): - """Validates Render File Directory is - not the same in every submission - """ - - order = ValidateContentsOrder - families = ["maxrender"] - hosts = ["max"] - label = "Render Output for Deadline" - optional = True - actions = [RepairAction] - - def process(self, instance): - if not self.is_active(instance.data): - return - file = rt.maxFileName - filename, ext = os.path.splitext(file) - if filename not in rt.rendOutputFilename: - raise 
PublishValidationError( - "Render output folder " - "doesn't match the max scene name! " - "Use Repair action to " - "fix the folder file path.." - ) - - @classmethod - def repair(cls, instance): - container = instance.data.get("instance_node") - RenderSettings().render_output(container) - cls.log.debug("Reset the render output folder...") diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_extended_viewport.py b/client/ayon_core/hosts/max/plugins/publish/validate_extended_viewport.py new file mode 100644 index 0000000000..ed476ec874 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_extended_viewport.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from ayon_core.pipeline import PublishValidationError +from pymxs import runtime as rt + + +class ValidateExtendedViewport(pyblish.api.ContextPlugin): + """Validate if the first viewport is an extended viewport.""" + + order = pyblish.api.ValidatorOrder + families = ["review"] + hosts = ["max"] + label = "Validate Extended Viewport" + + def process(self, context): + try: + rt.viewport.activeViewportEx(1) + except RuntimeError: + raise PublishValidationError( + "Please make sure one viewport is not an extended viewport", + description = ( + "Please make sure at least one viewport is not an " + "extended viewport but a 3dsmax supported viewport " + "i.e camera/persp/orthographic view.\n\n" + "To rectify it, please go to view in the top menubar, " + "go to Views -> Viewports Configuration -> Layout and " + "right click on one of the panels to change it." + )) + diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py index 22fda37e61..11b55232d5 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_frame_range.py @@ -18,11 +18,11 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, """Validates the frame ranges. This is an optional validator checking if the frame range on instance - matches the frame range specified for the asset. + matches the frame range specified for the folder. It also validates render frame ranges of render layers. - Repair action will change everything to match the asset frame range. + Repair action will change everything to match the folder frame range. This can be turned off by the artist to allow custom ranges. """ @@ -42,7 +42,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, return frame_range = get_frame_range( - asset_doc=instance.data["assetEntity"]) + instance.data["taskEntity"]) inst_frame_start = instance.data.get("frameStartHandle") inst_frame_end = instance.data.get("frameEndHandle") @@ -57,12 +57,12 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, if frame_start_handle != inst_frame_start: errors.append( f"Start frame ({inst_frame_start}) on instance does not match " # noqa - f"with the start frame ({frame_start_handle}) set on the asset data. ") # noqa + f"with the start frame ({frame_start_handle}) set on the folder attributes. ") # noqa if frame_end_handle != inst_frame_end: errors.append( f"End frame ({inst_frame_end}) on instance does not match " f"with the end frame ({frame_end_handle}) " - "from the asset data. ") + "from the folder attributes. 
") if errors: bullet_point_errors = "\n".join( diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py new file mode 100644 index 0000000000..5107665235 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_instance_in_context.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +"""Validate if instance context is the same as current context.""" +import pyblish.api +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_core.hosts.max.api.action import SelectInvalidAction +from pymxs import runtime as rt + + +class ValidateInstanceInContext(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validator to check if instance context match current context. + + When working in per-shot style you always publish data in context of + current context (shot). This validator checks if this is so. It is optional + so it can be disabled when needed. + + Action on this validator will select invalid instances. + """ + order = ValidateContentsOrder + label = "Instance in same Context" + optional = True + hosts = ["max"] + actions = [SelectInvalidAction, RepairAction] + + def process(self, instance): + if not self.is_active(instance.data): + return + + folderPath = instance.data.get("folderPath") + task = instance.data.get("task") + context = self.get_context(instance) + if (folderPath, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folderPath, task) + message = ( + "Instance '{}' publishes to different context(folder or task) " + "than current context: {}. Current context: {}".format( + instance.name, instance_label, context_label + ) + ) + raise PublishValidationError( + message=message, + description=( + "## Publishing to a different context data(folder or task)\n" + "There are publish instances present which are publishing " + "into a different folder path or task than your current context.\n\n" + "Usually this is not what you want but there can be cases " + "where you might want to publish into another context or " + "shot. If that's the case you can disable the validation " + "on the instance to ignore it." 
+ ) + ) + + @classmethod + def get_invalid(cls, instance): + invalid = [] + folderPath = instance.data.get("folderPath") + task = instance.data.get("task") + context = cls.get_context(instance) + if (folderPath, task) != context: + invalid.append(rt.getNodeByName(instance.name)) + return invalid + + @classmethod + def repair(cls, instance): + context_asset = instance.context.data["folderPath"] + context_task = instance.context.data["task"] + instance_node = rt.getNodeByName(instance.data.get( + "instance_node", "")) + if not instance_node: + return + rt.SetUserProp(instance_node, "folderPath", context_asset) + rt.SetUserProp(instance_node, "task", context_task) + + @staticmethod + def get_context(instance): + """Return folder path and task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"] diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py b/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py new file mode 100644 index 0000000000..ccd91da2be --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_mesh_has_uv.py @@ -0,0 +1,60 @@ + +import pyblish.api +from ayon_core.hosts.max.api.action import SelectInvalidAction +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) +from pymxs import runtime as rt + + +class ValidateMeshHasUVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + + """Validate the current mesh has UVs. + + This validator only checks if the mesh has UVs but not + whether all the individual faces of the mesh have UVs. + + It validates whether the current mesh has texture vertices. + If the mesh does not have texture vertices, it does not + have UVs in Max. + + """ + + order = ValidateMeshOrder + hosts = ['max'] + families = ['model'] + label = 'Validate Mesh Has UVs' + actions = [SelectInvalidAction] + optional = True + + @classmethod + def get_invalid(cls, instance): + meshes = [member for member in instance.data["members"] + if rt.isProperty(member, "mesh")] + invalid = [member for member in meshes + if member.mesh.numTVerts == 0] + return invalid + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + bullet_point_invalid_statement = "\n".join( + "- {}".format(node.name) for node + in invalid + ) + report = ( + "Model meshes are required to have UVs.\n\n" + "Meshes detected with invalid or missing UVs:\n" + f"{bullet_point_invalid_statement}\n" + ) + raise PublishValidationError( + report, + description=( + "Model meshes are required to have UVs.\n\n" + "Meshes detected with no texture vertices or missing UVs"), + title="Non-mesh objects found or mesh has missing UVs") diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_model_name.py b/client/ayon_core/hosts/max/plugins/publish/validate_model_name.py new file mode 100644 index 0000000000..eb86e2e5bd --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_model_name.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +"""Validate model node names.""" +import re + +import pyblish.api + +from ayon_core.hosts.max.api.action import SelectInvalidAction + +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, + PublishXmlValidationError, + ValidateContentsOrder +) + +class ValidateModelName(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): + """Validate Model Name.
+ + Validation regex is `(.*)_(?P<subset>.*)_(GEO)` by default. + The setting supports the following regex group names: + - project + - asset + - subset + + Examples: + `{SOME_RANDOM_NAME}_{YOUR_SUBSET_NAME}_GEO` should be your + default model name. + The regex of `(?P<subset>.*)` can be replaced by `(?P<asset>.*)` + and `(?P<project>.*)`. + `(.*)_(?P<asset>.*)_(GEO)` checks if your model name is + `{SOME_RANDOM_NAME}_{CURRENT_ASSET_NAME}_GEO` + `(.*)_(?P<project>.*)_(GEO)` checks if your model name is + `{SOME_RANDOM_NAME}_{CURRENT_PROJECT_NAME}_GEO` + + """ + optional = True + order = ValidateContentsOrder + hosts = ["max"] + families = ["model"] + label = "Validate Model Name" + actions = [SelectInvalidAction] + # defined by settings + regex = r"(.*)_(?P<subset>.*)_(GEO)" + # cache + regex_compiled = None + + def process(self, instance): + if not self.is_active(instance.data): + return + + invalid = self.get_invalid(instance) + if invalid: + names = "\n".join( + "- {}".format(node.name) for node in invalid + ) + raise PublishXmlValidationError( + plugin=self, + message="Nodes found with invalid model names: {}".format(invalid), + formatting_data={"nodes": names} + ) + + @classmethod + def get_invalid(cls, instance): + if not cls.regex: + cls.log.warning("No regex pattern set. Nothing to validate.") + return + + members = instance.data.get("members") + if not members: + cls.log.error("No members found in the instance.") + return + + cls.regex_compiled = re.compile(cls.regex) + + invalid = [] + for obj in members: + if cls.invalid_name(instance, obj): + invalid.append(obj) + return invalid + + @classmethod + def invalid_name(cls, instance, obj): + """Check whether the object has an invalid name + with regard to the validation regex in the AYON settings. + + Args: + instance (pyblish.api.instance): Instance + obj: object to check + + Returns: + object: the invalid object, or None if the name is valid + """ + regex = cls.regex_compiled + name = obj.name + match = regex.match(name) + + if match is None: + cls.log.error("Invalid model name on: %s", name) + cls.log.error("Name doesn't match regex {}".format(regex.pattern)) + return obj + + # Validate regex groups + invalid = False + compare = { + "project": instance.context.data["projectName"], + "asset": instance.data["folderPath"], + "subset": instance.data["productName"] + } + for key, required_value in compare.items(): + if key in regex.groupindex: + if match.group(key) != required_value: + cls.log.error( + "Invalid %s name for the model %s, " + "required name is %s", + key, name, required_value + ) + invalid = True + + if invalid: + return obj diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_no_animation.py b/client/ayon_core/hosts/max/plugins/publish/validate_no_animation.py new file mode 100644 index 0000000000..4b2a18d606 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_no_animation.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from pymxs import runtime as rt +from ayon_core.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_core.hosts.max.api.action import SelectInvalidAction + + +def get_invalid_keys(obj): + """Check whether there are keyframes on the object's transforms. + + Args: + obj: node to check for keyframes on its transform controllers + + Returns: + bool: whether keyframe(s) exist on the object + """ + for transform in ["Position", "Rotation", "Scale"]: + num_of_key = rt.NumKeys(rt.getPropertyController( + obj.controller, transform)) + if num_of_key > 0: + return True + return False + + +class ValidateNoAnimation(pyblish.api.InstancePlugin,
OptionalPyblishPluginMixin): + """Validates No Animation. + + Ensure there are no keyframes on nodes in the instance. + """ + + order = pyblish.api.ValidatorOrder + families = ["model"] + hosts = ["max"] + optional = True + label = "Validate No Animation" + actions = [SelectInvalidAction] + + def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) + if invalid: + raise PublishValidationError( + "Keyframes found on:\n\n{0}".format(invalid), + title="Keyframes on model" + ) + + @staticmethod + def get_invalid(instance): + """Get invalid object(s) which have keyframe(s). + + Args: + instance (pyblish.api.instance): Instance + + Returns: + list: list of invalid objects + """ + invalid = [node for node in instance.data["members"] + if node.isAnimated or get_invalid_keys(node)] + + return invalid diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py b/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py new file mode 100644 index 0000000000..394d3119c4 --- /dev/null +++ b/client/ayon_core/hosts/max/plugins/publish/validate_renderpasses.py @@ -0,0 +1,185 @@ +import os +import pyblish.api +from pymxs import runtime as rt +from ayon_core.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) +from ayon_core.hosts.max.api.lib_rendersettings import RenderSettings + + +class ValidateRenderPasses(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): + """Validates Render Passes before farm submission. + """ + + order = ValidateContentsOrder + families = ["maxrender"] + hosts = ["max"] + label = "Validate Render Passes" + actions = [RepairAction] + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + bullet_point_invalid_statement = "\n".join( + f"- {err_type}: {filepath}" for err_type, filepath + in invalid + ) + report = ( + "Invalid render passes found.\n\n" + f"{bullet_point_invalid_statement}\n\n" + "You can use the repair action to fix the invalid filepath." + ) + raise PublishValidationError( + report, title="Invalid Render Passes") + + @classmethod + def get_invalid(cls, instance): + """Get invalid beauty render outputs and + render elements. + + 1. Check Render Output Folder matches the name of + the current Max Scene, e.g. + The name of the current Max scene: + John_Doe.max + The expected render output directory: + {root[work]}/{project[name]}/{hierarchy}/{asset}/ + work/{task[name]}/render/3dsmax/John_Doe/ + + 2. Check image extension(s) of the render output(s) + matches the image format in OP/AYON setting, e.g. + The current image format in settings: png + The expected render outputs: John_Doe.png + + 3. Check filename of render element ends with the name of + render element from the 3dsMax Render Element Manager. + e.g.
The name of render element: RsCryptomatte + The expected filename: {InstanceName}_RsCryptomatte.png + + Args: + instance (pyblish.api.Instance): instance + + Returns: + list: list of invalid filenames which do not match + the expected names + """ + invalid = [] + file = rt.maxFileName + workfile_name, ext = os.path.splitext(file) + if workfile_name not in rt.rendOutputFilename: + cls.log.error( + "Render output folder must include" + f" the max scene name {workfile_name} " + ) + invalid_folder_name = os.path.dirname( + rt.rendOutputFilename).replace( + "\\", "/").split("/")[-1] + invalid.append(("Invalid Render Output Folder", + invalid_folder_name)) + beauty_fname = os.path.basename(rt.rendOutputFilename) + beauty_name, ext = os.path.splitext(beauty_fname) + invalid_filenames = cls.get_invalid_filenames( + instance, beauty_name) + invalid.extend(invalid_filenames) + invalid_image_format = cls.get_invalid_image_format( + instance, ext.lstrip(".")) + invalid.extend(invalid_image_format) + renderer = instance.data["renderer"] + if renderer in [ + "ART_Renderer", + "Redshift_Renderer", + "V_Ray_6_Hotfix_3", + "V_Ray_GPU_6_Hotfix_3", + "Default_Scanline_Renderer", + "Quicksilver_Hardware_Renderer", + ]: + render_elem = rt.maxOps.GetCurRenderElementMgr() + render_elem_num = render_elem.NumRenderElements() + for i in range(render_elem_num): + renderlayer_name = render_elem.GetRenderElement(i) + renderpass = str(renderlayer_name).rsplit(":", 1)[-1] + rend_file = render_elem.GetRenderElementFilename(i) + if not rend_file: + continue + + rend_fname, ext = os.path.splitext( + os.path.basename(rend_file)) + invalid_filenames = cls.get_invalid_filenames( + instance, rend_fname, renderpass=renderpass) + invalid.extend(invalid_filenames) + invalid_image_format = cls.get_invalid_image_format( + instance, ext) + invalid.extend(invalid_image_format) + elif renderer == "Arnold": + cls.log.debug( + "Renderpass validation does not support Arnold yet," + " validation skipped...") + else: + cls.log.debug( + "Skipping render element validation " + f"for renderer: {renderer}") + return invalid + + @classmethod + def get_invalid_filenames(cls, instance, file_name, renderpass=None): + """Get invalid filenames from render outputs. + + Args: + instance (pyblish.api.Instance): instance + file_name (str): name of the file + renderpass (str, optional): name of the renderpass. + Defaults to None. + + Returns: + list: invalid filenames + """ + invalid = [] + if instance.name not in file_name: + cls.log.error("The renderpass filename should contain the instance name.") + invalid.append(("Invalid instance name", + file_name)) + if renderpass is not None: + if not file_name.rstrip(".").endswith(renderpass): + cls.log.error( + f"Filename for {renderpass} should " + f"end with {renderpass}: {file_name}" + ) + invalid.append((f"Invalid {renderpass}", + os.path.basename(file_name))) + return invalid + + @classmethod + def get_invalid_image_format(cls, instance, ext): + """Check if the image format of the render outputs + aligns with that in the setting.
+ + Args: + instance (pyblish.api.Instance): instance + ext (str): image extension + + Returns: + list: list of files with invalid image format + """ + invalid = [] + settings = instance.context.data["project_settings"].get("max") + image_format = settings["RenderSettings"]["image_format"] + ext = ext.lstrip(".") + if ext != image_format: + msg = ( + f"Invalid image format {ext} for render outputs.\n" + f"Should be: {image_format}") + cls.log.error(msg) + invalid.append((msg, ext)) + return invalid + + @classmethod + def repair(cls, instance): + container = instance.data.get("instance_node") + # TODO: need to rename the function of render_output + RenderSettings().render_output(container) + cls.log.debug("Finished repairing the render output " + "folder and filenames.") diff --git a/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py b/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py index 0058d3b262..5f6cd0a21d 100644 --- a/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py +++ b/client/ayon_core/hosts/max/plugins/publish/validate_resolution_setting.py @@ -7,7 +7,10 @@ from ayon_core.pipeline.publish import ( RepairAction, PublishValidationError ) -from ayon_core.hosts.max.api.lib import reset_scene_resolution +from ayon_core.hosts.max.api.lib import ( + reset_scene_resolution, + imprint +) class ValidateResolutionSetting(pyblish.api.InstancePlugin, @@ -24,9 +27,11 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, def process(self, instance): if not self.is_active(instance.data): return - width, height = self.get_db_resolution(instance) - current_width = rt.renderWidth - current_height = rt.renderHeight + width, height = self.get_folder_resolution(instance) + current_width, current_height = ( + self.get_current_resolution(instance) + ) + if current_width != width and current_height != height: raise PublishValidationError("Resolution Setting " "not matching resolution " @@ -41,18 +46,47 @@ class ValidateResolutionSetting(pyblish.api.InstancePlugin, "not matching resolution set " "on asset or shot.") - def get_db_resolution(self, instance): - asset_doc = instance.data["assetEntity"] - project_doc = instance.context.data["projectEntity"] - for data in [asset_doc["data"], project_doc["data"]]: - if "resolutionWidth" in data and "resolutionHeight" in data: - width = data["resolutionWidth"] - height = data["resolutionHeight"] - return int(width), int(height) + def get_current_resolution(self, instance): + return rt.renderWidth, rt.renderHeight - # Defaults if not found in asset document or project document + @classmethod + def get_folder_resolution(cls, instance): + task_entity = instance.data.get("taskEntity") + if task_entity: + task_attributes = task_entity["attrib"] + width = task_attributes["resolutionWidth"] + height = task_attributes["resolutionHeight"] + return int(width), int(height) + + # Defaults if not found in folder entity return 1920, 1080 @classmethod def repair(cls, instance): reset_scene_resolution() + + +class ValidateReviewResolutionSetting(ValidateResolutionSetting): + families = ["review"] + optional = True + actions = [RepairAction] + + def get_current_resolution(self, instance): + current_width = instance.data["review_width"] + current_height = instance.data["review_height"] + return current_width, current_height + + @classmethod + def repair(cls, instance): + context_width, context_height = ( + cls.get_folder_resolution(instance) + ) + creator_attrs = instance.data["creator_attributes"] + 
creator_attrs["review_width"] = context_width + creator_attrs["review_height"] = context_height + creator_attrs_data = { + "creator_attributes": creator_attrs + } + # update the width and height of review + # data in creator_attributes + imprint(instance.data["instance_node"], creator_attrs_data) diff --git a/client/ayon_core/hosts/max/startup/startup.ms b/client/ayon_core/hosts/max/startup/startup.ms index 4c597901f3..2dfe53a6a5 100644 --- a/client/ayon_core/hosts/max/startup/startup.ms +++ b/client/ayon_core/hosts/max/startup/startup.ms @@ -1,4 +1,4 @@ --- OpenPype Init Script +-- AYON Init Script ( local sysPath = dotNetClass "System.IO.Path" local sysDir = dotNetClass "System.IO.Directory" diff --git a/client/ayon_core/hosts/maya/addon.py b/client/ayon_core/hosts/maya/addon.py index c68aa4c911..1ad0fcf4cf 100644 --- a/client/ayon_core/hosts/maya/addon.py +++ b/client/ayon_core/hosts/maya/addon.py @@ -22,15 +22,15 @@ class MayaAddon(AYONAddon, IHostAddon): if norm_path not in new_python_paths: new_python_paths.append(norm_path) + # add vendor path + new_python_paths.append( + os.path.join(MAYA_ROOT_DIR, "vendor", "python") + ) env["PYTHONPATH"] = os.pathsep.join(new_python_paths) # Set default environments envs = { "AYON_LOG_NO_COLORS": "1", - # For python module 'qtpy' - "QT_API": "PySide2", - # For python module 'Qt' - "QT_PREFERRED_BINDING": "PySide2" } for key, value in envs.items(): env[key] = value diff --git a/client/ayon_core/hosts/maya/api/action.py b/client/ayon_core/hosts/maya/api/action.py index 4beb1e3e5b..d845ac6066 100644 --- a/client/ayon_core/hosts/maya/api/action.py +++ b/client/ayon_core/hosts/maya/api/action.py @@ -2,9 +2,12 @@ from __future__ import absolute_import import pyblish.api +import ayon_api -from ayon_core.client import get_asset_by_name -from ayon_core.pipeline.publish import get_errored_instances_from_context +from ayon_core.pipeline.publish import ( + get_errored_instances_from_context, + get_errored_plugins_from_context +) class GenerateUUIDsOnInvalidAction(pyblish.api.Action): @@ -74,21 +77,23 @@ class GenerateUUIDsOnInvalidAction(pyblish.api.Action): from . import lib - # Expecting this is called on validators in which case 'assetEntity' + # Expecting this is called on validators in which case 'folderEntity' # should be always available, but kept a way to query it by name. - asset_doc = instance.data.get("assetEntity") - if not asset_doc: - asset_name = instance.data["folderPath"] + folder_entity = instance.data.get("folderEntity") + if not folder_entity: + folder_path = instance.data["folderPath"] project_name = instance.context.data["projectName"] self.log.info(( - "Asset is not stored on instance." - " Querying by name \"{}\" from project \"{}\"" - ).format(asset_name, project_name)) - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["_id"] + "Folder is not stored on instance." 
+ " Querying by path \"{}\" from project \"{}\"" + ).format(folder_path, project_name)) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} ) - for node, _id in lib.generate_ids(nodes, asset_id=asset_doc["_id"]): + for node, _id in lib.generate_ids( + nodes, folder_id=folder_entity["id"] + ): lib.set_id(node, _id, overwrite=True) @@ -110,20 +115,25 @@ class SelectInvalidAction(pyblish.api.Action): except ImportError: raise ImportError("Current host is not Maya") - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - # Get the invalid nodes for the plug-ins self.log.info("Finding invalid nodes..") invalid = list() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.extend(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") + if issubclass(plugin, pyblish.api.ContextPlugin): + errored_plugins = get_errored_plugins_from_context(context) + if plugin in errored_plugins: + invalid = plugin.get_invalid(context) + else: + errored_instances = get_errored_instances_from_context( + context, plugin=plugin + ) + for instance in errored_instances: + invalid_nodes = plugin.get_invalid(instance) + if invalid_nodes: + if isinstance(invalid_nodes, (list, tuple)): + invalid.extend(invalid_nodes) + else: + self.log.warning("Plug-in returned to be invalid, " + "but has no selectable nodes.") # Ensure unique (process each node only once) invalid = list(set(invalid)) diff --git a/client/ayon_core/hosts/maya/api/commands.py b/client/ayon_core/hosts/maya/api/commands.py index f69dca97a8..22cf0871e2 100644 --- a/client/ayon_core/hosts/maya/api/commands.py +++ b/client/ayon_core/hosts/maya/api/commands.py @@ -1,9 +1,10 @@ # -*- coding: utf-8 -*- -"""OpenPype script commands to be used directly in Maya.""" +"""AYON script commands to be used directly in Maya.""" from maya import cmds -from ayon_core.client import get_asset_by_name, get_project -from ayon_core.pipeline import get_current_project_name, get_current_asset_name +from ayon_api import get_project, get_folder_by_path + +from ayon_core.pipeline import get_current_project_name, get_current_folder_path class ToolWindows: @@ -38,22 +39,30 @@ class ToolWindows: cls._windows[tool] = window -def _resolution_from_document(doc): - if not doc or "data" not in doc: - print("Entered document is not valid. \"{}\"".format(str(doc))) +def _resolution_from_entity(entity): + if not entity: + print("Entered entity is not valid. 
\"{}\"".format( + str(entity) + )) return None - resolution_width = doc["data"].get("resolutionWidth") - resolution_height = doc["data"].get("resolutionHeight") + attributes = entity.get("attrib") + if attributes is None: + attributes = entity.get("data", {}) + + resolution_width = attributes.get("resolutionWidth") + resolution_height = attributes.get("resolutionHeight") # Backwards compatibility if resolution_width is None or resolution_height is None: - resolution_width = doc["data"].get("resolution_width") - resolution_height = doc["data"].get("resolution_height") + resolution_width = attributes.get("resolution_width") + resolution_height = attributes.get("resolution_height") # Make sure both width and height are set if resolution_width is None or resolution_height is None: cmds.warning( - "No resolution information found for \"{}\"".format(doc["name"]) + "No resolution information found for \"{}\"".format( + entity["name"] + ) ) return None @@ -65,20 +74,20 @@ def reset_resolution(): resolution_width = 1920 resolution_height = 1080 - # Get resolution from asset + # Get resolution from folder project_name = get_current_project_name() - asset_name = get_current_asset_name() - asset_doc = get_asset_by_name(project_name, asset_name) - resolution = _resolution_from_document(asset_doc) + folder_path = get_current_folder_path() + folder_entity = get_folder_by_path(project_name, folder_path) + resolution = _resolution_from_entity(folder_entity) # Try get resolution from project if resolution is None: # TODO go through visualParents print(( - "Asset \"{}\" does not have set resolution." + "Folder '{}' does not have set resolution." " Trying to get resolution from project" - ).format(asset_name)) - project_doc = get_project(project_name) - resolution = _resolution_from_document(project_doc) + ).format(folder_path)) + project_entity = get_project(project_name) + resolution = _resolution_from_entity(project_entity) if resolution is None: msg = "Using default resolution {}x{}" diff --git a/client/ayon_core/hosts/maya/api/customize.py b/client/ayon_core/hosts/maya/api/customize.py index da046b538d..16255f69ba 100644 --- a/client/ayon_core/hosts/maya/api/customize.py +++ b/client/ayon_core/hosts/maya/api/customize.py @@ -109,11 +109,13 @@ def override_toolbox_ui(): controls.append( cmds.iconTextButton( - "pype_toolbox_lookmanager", + "ayon_toolbox_lookmanager", annotation="Look Manager", label="Look Manager", image=os.path.join(icons, "lookmanager.png"), - command=show_look_assigner, + command=lambda: show_look_assigner( + parent=parent_widget + ), width=icon_size, height=icon_size, parent=parent @@ -122,7 +124,7 @@ def override_toolbox_ui(): controls.append( cmds.iconTextButton( - "pype_toolbox_workfiles", + "ayon_toolbox_workfiles", annotation="Work Files", label="Work Files", image=os.path.join(icons, "workfiles.png"), @@ -137,7 +139,7 @@ def override_toolbox_ui(): controls.append( cmds.iconTextButton( - "pype_toolbox_loader", + "ayon_toolbox_loader", annotation="Loader", label="Loader", image=os.path.join(icons, "loader.png"), @@ -152,7 +154,7 @@ def override_toolbox_ui(): controls.append( cmds.iconTextButton( - "pype_toolbox_manager", + "ayon_toolbox_manager", annotation="Inventory", label="Inventory", image=os.path.join(icons, "inventory.png"), diff --git a/client/ayon_core/hosts/maya/api/fbx.py b/client/ayon_core/hosts/maya/api/fbx.py index 97e95d2ec4..939da4011b 100644 --- a/client/ayon_core/hosts/maya/api/fbx.py +++ b/client/ayon_core/hosts/maya/api/fbx.py @@ -2,8 +2,6 @@ """Tools to work 
with FBX.""" import logging -from pyblish.api import Instance - from maya import cmds # noqa import maya.mel as mel # noqa from ayon_core.hosts.maya.api.lib import maintained_selection @@ -146,7 +144,6 @@ class FBXExtractor: return options def set_options_from_instance(self, instance): - # type: (Instance) -> None """Sets FBX export options from data in the instance. Args: diff --git a/client/ayon_core/hosts/maya/api/lib.py b/client/ayon_core/hosts/maya/api/lib.py index 1aa2244111..321bcbc0b5 100644 --- a/client/ayon_core/hosts/maya/api/lib.py +++ b/client/ayon_core/hosts/maya/api/lib.py @@ -19,18 +19,12 @@ from six import string_types from maya import cmds, mel from maya.api import OpenMaya -from ayon_core.client import ( - get_project, - get_asset_by_name, - get_subsets, - get_last_versions, - get_representation_by_name, - get_asset_name_identifier, -) +import ayon_api + from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( get_current_project_name, - get_current_asset_name, + get_current_folder_path, get_current_task_name, discover_loader_plugins, loaders_from_representation, @@ -43,7 +37,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, ) from ayon_core.lib import NumberDef -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_task_entity from ayon_core.pipeline.create import CreateContext from ayon_core.lib.profiles_filtering import filter_profiles @@ -137,7 +131,7 @@ def get_main_window(): def suspended_refresh(suspend=True): """Suspend viewport refreshes - cmds.ogs(pause=True) is a toggle so we cant pass False. + cmds.ogs(pause=True) is a toggle so we can't pass False. """ if IS_HEADLESS: yield @@ -284,16 +278,16 @@ def generate_capture_preset(instance, camera, path, width_preset = capture_preset["Resolution"]["width"] height_preset = capture_preset["Resolution"]["height"] - # Set resolution variables from asset values - asset_data = instance.data["assetEntity"]["data"] - asset_width = asset_data.get("resolutionWidth") - asset_height = asset_data.get("resolutionHeight") + # Set resolution variables from folder values + folder_attributes = instance.data["folderEntity"]["attrib"] + folder_width = folder_attributes.get("resolutionWidth") + folder_height = folder_attributes.get("resolutionHeight") review_instance_width = instance.data.get("review_width") review_instance_height = instance.data.get("review_height") # Use resolution from instance if review width/height is set # Otherwise use the resolution from preset if it has non-zero values - # Otherwise fall back to asset width x height + # Otherwise fall back to folder width x height # Else define no width, then `capture.capture` will use render resolution if review_instance_width and review_instance_height: preset["width"] = review_instance_width @@ -301,9 +295,9 @@ def generate_capture_preset(instance, camera, path, elif width_preset and height_preset: preset["width"] = width_preset preset["height"] = height_preset - elif asset_width and asset_height: - preset["width"] = asset_width - preset["height"] = asset_height + elif folder_width and folder_height: + preset["width"] = folder_width + preset["height"] = folder_height # Isolate view is requested by having objects in the set besides a # camera. If there is only 1 member it'll be the camera because we @@ -589,7 +583,7 @@ def pairwise(iterable): def collect_animation_defs(fps=False): - """Get the basic animation attribute defintions for the publisher. 
+ """Get the basic animation attribute definitions for the publisher. Returns: OrderedDict @@ -1525,24 +1519,30 @@ def extract_alembic(file, # region ID -def get_id_required_nodes(referenced_nodes=False, nodes=None): - """Filter out any node which are locked (reference) or readOnly +def get_id_required_nodes(referenced_nodes=False, + nodes=None, + existing_ids=True): + """Return nodes that should receive a `cbId` attribute. + + This includes only mesh and curve nodes, parent transforms of the shape + nodes, file texture nodes and object sets (including shading engines). + + This filters out any node which is locked, referenced, read-only, + intermediate object. Args: - referenced_nodes (bool): set True to filter out reference nodes + referenced_nodes (bool): set True to include referenced nodes nodes (list, Optional): nodes to consider + existing_ids (bool): set True to include nodes with `cbId` attribute + Returns: nodes (set): list of filtered nodes """ - lookup = None - if nodes is None: - # Consider all nodes - nodes = cmds.ls() - else: - # Build a lookup for the only allowed nodes in output based - # on `nodes` input of the function (+ ensure long names) - lookup = set(cmds.ls(nodes, long=True)) + if nodes is not None and not nodes: + # User supplied an empty `nodes` list to check so all we can + # do is return the empty result + return set() def _node_type_exists(node_type): try: @@ -1551,63 +1551,142 @@ def get_id_required_nodes(referenced_nodes=False, nodes=None): except RuntimeError: return False + def iterate(maya_iterator): + while not maya_iterator.isDone(): + yield maya_iterator.thisNode() + maya_iterator.next() + # `readOnly` flag is obsolete as of Maya 2016 therefore we explicitly # remove default nodes and reference nodes - camera_shapes = ["frontShape", "sideShape", "topShape", "perspShape"] + default_camera_shapes = { + "frontShape", "sideShape", "topShape", "perspShape" + } - ignore = set() - if not referenced_nodes: - ignore |= set(cmds.ls(long=True, referencedNodes=True)) - - # list all defaultNodes to filter out from the rest - ignore |= set(cmds.ls(long=True, defaultNodes=True)) - ignore |= set(cmds.ls(camera_shapes, long=True)) - - # Remove Turtle from the result of `cmds.ls` if Turtle is loaded - # TODO: This should be a less specific check for a single plug-in. 
- if _node_type_exists("ilrBakeLayer"): - ignore |= set(cmds.ls(type="ilrBakeLayer", long=True)) - - # Establish set of nodes types to include - types = ["objectSet", "file", "mesh", "nurbsCurve", "nurbsSurface"] + # The filtered types do not include transforms because we only want the + # parent transforms that have a child shape that we filtered to, so we + # include the parents here + types = ["mesh", "nurbsCurve", "nurbsSurface", "file", "objectSet"] # Check if plugin nodes are available for Maya by checking if the plugin # is loaded if cmds.pluginInfo("pgYetiMaya", query=True, loaded=True): types.append("pgYetiMaya") - # We *always* ignore intermediate shapes, so we filter them out directly - nodes = cmds.ls(nodes, type=types, long=True, noIntermediate=True) + iterator_type = OpenMaya.MIteratorType() + # This tries to be closest matching API equivalents of `types` variable + iterator_type.filterList = [ + OpenMaya.MFn.kMesh, # mesh + OpenMaya.MFn.kNurbsSurface, # nurbsSurface + OpenMaya.MFn.kNurbsCurve, # nurbsCurve + OpenMaya.MFn.kFileTexture, # file + OpenMaya.MFn.kSet, # objectSet + OpenMaya.MFn.kPluginShape # pgYetiMaya + ] + it = OpenMaya.MItDependencyNodes(iterator_type) - # The items which need to pass the id to their parent - # Add the collected transform to the nodes - dag = cmds.ls(nodes, type="dagNode", long=True) # query only dag nodes - transforms = cmds.listRelatives(dag, - parent=True, - fullPath=True) or [] + fn_dep = OpenMaya.MFnDependencyNode() + fn_dag = OpenMaya.MFnDagNode() + result = set() - nodes = set(nodes) - nodes |= set(transforms) + def _should_include_parents(obj): + """Whether to include parents of obj in output""" + if not obj.hasFn(OpenMaya.MFn.kShape): + return False - nodes -= ignore # Remove the ignored nodes - if not nodes: - return nodes + fn_dag.setObject(obj) + if fn_dag.isIntermediateObject: + return False - # Ensure only nodes from the input `nodes` are returned when a - # filter was applied on function call because we also iterated - # to parents and alike - if lookup is not None: - nodes &= lookup + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dag.name() in default_camera_shapes + ): + return False - # Avoid locked nodes - nodes_list = list(nodes) - locked = cmds.lockNode(nodes_list, query=True, lock=True) - for node, lock in zip(nodes_list, locked): - if lock: - log.warning("Skipping locked node: %s" % node) - nodes.remove(node) + return True - return nodes + def _add_to_result_if_valid(obj): + """Add to `result` if the object should be included""" + fn_dep.setObject(obj) + if not existing_ids and fn_dep.hasAttribute("cbId"): + return + + if not referenced_nodes and fn_dep.isFromReferencedFile: + return + + if fn_dep.isDefaultNode: + return + + if fn_dep.isLocked: + return + + # Skip default cameras + if ( + obj.hasFn(OpenMaya.MFn.kCamera) and + fn_dep.name() in default_camera_shapes + ): + return + + if obj.hasFn(OpenMaya.MFn.kDagNode): + # DAG nodes + fn_dag.setObject(obj) + + # Skip intermediate objects + if fn_dag.isIntermediateObject: + return + + # DAG nodes can be instanced and thus may have multiple paths. + # We need to identify each path + paths = OpenMaya.MDagPath.getAllPathsTo(obj) + for dag in paths: + path = dag.fullPathName() + result.add(path) + else: + # Dependency node + path = fn_dep.name() + result.add(path) + + for obj in iterate(it): + # For any non-intermediate shape node always include the parent + # even if we exclude the shape itself (e.g. 
when locked, default) + if _should_include_parents(obj): + fn_dag.setObject(obj) + parents = [ + fn_dag.parent(index) for index in range(fn_dag.parentCount()) + ] + for parent_obj in parents: + _add_to_result_if_valid(parent_obj) + + _add_to_result_if_valid(obj) + + if not result: + return result + + # Exclude some additional types + exclude_types = [] + if _node_type_exists("ilrBakeLayer"): + # Remove Turtle from the result if Turtle is loaded + exclude_types.append("ilrBakeLayer") + + if exclude_types: + exclude_nodes = set(cmds.ls(nodes, long=True, type=exclude_types)) + if exclude_nodes: + result -= exclude_nodes + + # Filter to explicit input nodes if provided + if nodes is not None: + # The amount of input nodes to filter to can be large and querying + # many nodes can be slow in Maya. As such we want to try and reduce + # it as much as possible, so we include the type filter to try and + # reduce the result of `maya.cmds.ls` here. + nodes = set(cmds.ls(nodes, long=True, type=types + ["dagNode"])) + if nodes: + result &= nodes + else: + return set() + + return result def get_id(node): @@ -1638,7 +1717,7 @@ def get_id(node): return -def generate_ids(nodes, asset_id=None): +def generate_ids(nodes, folder_id=None): """Returns new unique ids for the given nodes. Note: This does not assign the new ids, it only generates the values. @@ -1655,27 +1734,33 @@ def generate_ids(nodes, asset_id=None): Args: nodes (list): List of nodes. - asset_id (str or bson.ObjectId): The database id for the *asset* to - generate for. When None provided the current asset in the - active session is used. + folder_id (Optional[str]): Folder id to generate id for. When None + provided current folder is used. Returns: list: A list of (node, id) tuples. """ - if asset_id is None: - # Get the asset ID from the database for the asset of current context + if folder_id is None: + # Get the folder id based on current context folder project_name = get_current_project_name() - asset_name = get_current_asset_name() - asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) - assert asset_doc, "No current asset found in Session" - asset_id = asset_doc['_id'] + folder_path = get_current_folder_path() + if not folder_path: + raise ValueError("Current folder path is not set") + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields=["id"] + ) + if not folder_entity: + raise ValueError(( + "Current folder '{}' was not found on the server" + ).format(folder_path)) + folder_id = folder_entity["id"] node_ids = [] for node in nodes: _, uid = str(uuid.uuid4()).rsplit("-", 1) - unique_id = "{}:{}".format(asset_id, uid) + unique_id = "{}:{}".format(folder_id, uid) node_ids.append((node, unique_id)) return node_ids @@ -1832,6 +1917,29 @@ def apply_attributes(attributes, nodes_by_id): set_attribute(attr, value, node) +def is_valid_reference_node(reference_node): + """Return whether Maya considers the reference node a valid reference. + + Maya might report an error when using `maya.cmds.referenceQuery`: + Reference node 'reference_node' is not associated with a reference file. + + Note that this does *not* check whether the reference node points to an + existing file. 
Instead it only returns whether maya considers it valid + and thus is not an unassociated reference node + + Arguments: + reference_node (str): Reference node name + + Returns: + bool: Whether reference node is a valid reference + + """ + sel = OpenMaya.MSelectionList() + sel.add(reference_node) + depend_node = sel.getDependNode(0) + return OpenMaya.MFnReference(depend_node).isValidReference() + + def get_container_members(container): """Returns the members of a container. This includes the nodes from any loaded references in the container. @@ -1857,7 +1965,16 @@ def get_container_members(container): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue - reference_members = cmds.referenceQuery(ref, nodes=True, dagPath=True) + try: + reference_members = cmds.referenceQuery(ref, + nodes=True, + dagPath=True) + except RuntimeError: + # Ignore reference nodes that are not associated with a + # referenced file on which `referenceQuery` command fails + if not is_valid_reference_node(ref): + continue + raise reference_members = cmds.ls(reference_members, long=True, objectsOnly=True) @@ -1867,21 +1984,18 @@ def get_container_members(container): # region LOOKDEV -def list_looks(project_name, asset_id): - """Return all look products for the given asset +def list_looks(project_name, folder_id): + """Return all look products for the given folder. This assumes all look products start with "look*" in their names. + + Returns: + list[dict[str, Any]]: List of look products. + """ - # # get all products with look leading in - # the name associated with the asset - # TODO this should probably look for product type 'look' instead of - # checking product name that can not start with product type - subset_docs = get_subsets(project_name, asset_ids=[asset_id]) - return [ - subset_doc - for subset_doc in subset_docs - if subset_doc["name"].startswith("look") - ] + return list(ayon_api.get_products( + project_name, folder_ids=[folder_id], product_types={"look"} + )) def assign_look_by_version(nodes, version_id): @@ -1900,16 +2014,19 @@ def assign_look_by_version(nodes, version_id): project_name = get_current_project_name() # Get representations of shader file and relationships - look_representation = get_representation_by_name( - project_name, "ma", version_id - ) - json_representation = get_representation_by_name( - project_name, "json", version_id - ) + representations = list(ayon_api.get_representations( + project_name=project_name, + representation_names={"ma", "json"}, + version_ids=[version_id] + )) + look_representation = next( + repre for repre in representations if repre["name"] == "ma") + json_representation = next( + repre for repre in representations if repre["name"] == "json") # See if representation is already loaded, if so reuse it. host = registered_host() - representation_id = str(look_representation['_id']) + representation_id = look_representation["id"] for container in host.ls(): if (container['loader'] == "LookLoader" and container['representation'] == representation_id): @@ -1942,10 +2059,10 @@ def assign_look_by_version(nodes, version_id): apply_shaders(relationships, shader_nodes, nodes) -def assign_look(nodes, product_name="lookDefault"): +def assign_look(nodes, product_name="lookMain"): """Assigns a look to a node. - Optimizes the nodes by grouping by asset id and finding + Optimizes the nodes by grouping by folder id and finding related product by name. 
    Args:
        nodes (list): all nodes to assign the look to
        product_name (str): name of the product to find
    """
-    # Group all nodes per asset id
+    # Group all nodes per folder id
     grouped = defaultdict(list)
     for node in nodes:
-        pype_id = get_id(node)
-        if not pype_id:
+        hash_id = get_id(node)
+        if not hash_id:
             continue

-        parts = pype_id.split(":", 1)
+        parts = hash_id.split(":", 1)
         grouped[parts[0]].append(node)

     project_name = get_current_project_name()
-    subset_docs = get_subsets(
-        project_name, subset_names=[product_name], asset_ids=grouped.keys()
+    product_entities = ayon_api.get_products(
+        project_name, product_names=[product_name], folder_ids=grouped.keys()
     )
-    subset_docs_by_asset_id = {
-        str(subset_doc["parent"]): subset_doc
-        for subset_doc in subset_docs
+    product_entities_by_folder_id = {
+        product_entity["folderId"]: product_entity
+        for product_entity in product_entities
     }
     product_ids = {
-        subset_doc["_id"]
-        for subset_doc in subset_docs_by_asset_id.values()
+        product_entity["id"]
+        for product_entity in product_entities_by_folder_id.values()
     }
-    last_version_docs = get_last_versions(
+    last_version_entities_by_product_id = ayon_api.get_last_versions(
         project_name,
-        subset_ids=product_ids,
-        fields=["_id", "name", "data.families"]
+        product_ids
     )
-    last_version_docs_by_product_id = {
-        last_version_doc["parent"]: last_version_doc
-        for last_version_doc in last_version_docs
-    }

-    for asset_id, asset_nodes in grouped.items():
-        # create objectId for database
-        subset_doc = subset_docs_by_asset_id.get(asset_id)
-        if not subset_doc:
+    for folder_id, asset_nodes in grouped.items():
+        product_entity = product_entities_by_folder_id.get(folder_id)
+        if not product_entity:
             log.warning((
                 "No product '{}' found for {}"
-            ).format(product_name, asset_id))
+            ).format(product_name, folder_id))
             continue

-        last_version = last_version_docs_by_product_id.get(subset_doc["_id"])
+        product_id = product_entity["id"]
+        last_version = last_version_entities_by_product_id.get(product_id)
         if not last_version:
             log.warning((
-                "Not found last version for product '{}' on asset with id {}"
-            ).format(product_name, asset_id))
+                "No last version found for product '{}' on folder with id {}"
+            ).format(product_name, folder_id))
             continue

-        families = last_version.get("data", {}).get("families") or []
+        families = last_version.get("attrib", {}).get("families") or []
         if "look" not in families:
             log.warning((
-                "Last version for product '{}' on asset with id {}"
+                "Last version for product '{}' on folder with id {}"
                 " does not have look product type"
-            ).format(product_name, asset_id))
+            ).format(product_name, folder_id))
             continue

-        log.debug("Assigning look '{}' ".format(
-            product_name, last_version["name"]))
+        log.debug("Assigning look '{}' version {}".format(
+            product_name, last_version["version"]))

-        assign_look_by_version(asset_nodes, last_version["_id"])
+        assign_look_by_version(asset_nodes, last_version["id"])


 def apply_shaders(relationships, shadernodes, nodes):
@@ -2120,22 +2232,6 @@ def get_related_sets(node):

     """

-    # Ignore specific suffices
-    ignore_suffices = ["out_SET", "controls_SET", "_INST", "_CON"]
-
-    # Default nodes to ignore
-    defaults = {"defaultLightSet", "defaultObjectSet"}
-
-    # Ids to ignore
-    ignored = {
-        AVALON_INSTANCE_ID,
-        AVALON_CONTAINER_ID,
-        AYON_INSTANCE_ID,
-        AYON_CONTAINER_ID,
-    }
-
-    view_sets = get_isolate_view_sets()
-
     sets = cmds.listSets(object=node, extendToShape=False)
     if not sets:
         return []

@@ -2146,25 +2242,47 @@
     # returned by `cmds.listSets(allSets=True)`
     sets = 
cmds.ls(sets) + # Ids to ignore + ignored = { + AVALON_INSTANCE_ID, + AVALON_CONTAINER_ID, + AYON_INSTANCE_ID, + AYON_CONTAINER_ID, + } + # Ignore `avalon.container` - sets = [s for s in sets if - not cmds.attributeQuery("id", node=s, exists=True) or - not cmds.getAttr("%s.id" % s) in ignored] + sets = [ + s for s in sets + if ( + not cmds.attributeQuery("id", node=s, exists=True) + or cmds.getAttr(f"{s}.id") not in ignored + ) + ] + if not sets: + return sets # Exclude deformer sets (`type=2` for `maya.cmds.listSets`) - deformer_sets = cmds.listSets(object=node, - extendToShape=False, - type=2) or [] - deformer_sets = set(deformer_sets) # optimize lookup - sets = [s for s in sets if s not in deformer_sets] + exclude_sets = cmds.listSets(object=node, + extendToShape=False, + type=2) or [] + exclude_sets = set(exclude_sets) # optimize lookup + + # Default nodes to ignore + exclude_sets.update({"defaultLightSet", "defaultObjectSet"}) + + # Filter out the sets to exclude + sets = [s for s in sets if s not in exclude_sets] # Ignore when the set has a specific suffix - sets = [s for s in sets if not any(s.endswith(x) for x in ignore_suffices)] + ignore_suffices = ("out_SET", "controls_SET", "_INST", "_CON") + sets = [s for s in sets if not s.endswith(ignore_suffices)] + if not sets: + return sets # Ignore viewport filter view sets (from isolate select and # viewports) + view_sets = get_isolate_view_sets() sets = [s for s in sets if s not in view_sets] - sets = [s for s in sets if s not in defaults] return sets @@ -2435,12 +2553,10 @@ def set_scene_fps(fps, update=True): cmds.currentUnit(time=unit, updateAnimation=update) # Set time slider data back to previous state - cmds.playbackOptions(edit=True, minTime=start_frame) - cmds.playbackOptions(edit=True, maxTime=end_frame) - - # Set animation data - cmds.playbackOptions(edit=True, animationStartTime=animation_start) - cmds.playbackOptions(edit=True, animationEndTime=animation_end) + cmds.playbackOptions(minTime=start_frame, + maxTime=end_frame, + animationStartTime=animation_start, + animationEndTime=animation_end) cmds.currentTime(current_frame, edit=True, update=True) @@ -2498,14 +2614,16 @@ def get_fps_for_current_context(): """ project_name = get_current_project_name() - asset_name = get_current_asset_name() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["data.fps"] + folder_path = get_current_folder_path() + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"attrib.fps"} ) or {} - fps = asset_doc.get("data", {}).get("fps") + fps = folder_entity.get("attrib", {}).get("fps") if not fps: - project_doc = get_project(project_name, fields=["data.fps"]) or {} - fps = project_doc.get("data", {}).get("fps") + project_entity = ayon_api.get_project( + project_name, fields=["attrib.fps"] + ) or {} + fps = project_entity.get("attrib", {}).get("fps") if not fps: fps = 25 @@ -2514,7 +2632,7 @@ def get_fps_for_current_context(): def get_frame_range(include_animation_range=False): - """Get the current assets frame range and handles. + """Get the current task frame range and handles. Args: include_animation_range (bool, optional): Whether to include @@ -2522,24 +2640,34 @@ def get_frame_range(include_animation_range=False): range of the timeline. It is excluded by default. Returns: - dict: Asset's expected frame range values. + dict: Task's expected frame range values. 
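+            For example (values depend on the task attributes):
+                {"frameStart": 1001, "frameEnd": 1100,
+                 "handleStart": 10, "handleEnd": 10}
+            With `include_animation_range=True` the "animationStart" and
+            "animationEnd" keys are included as well.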
""" # Set frame start/end project_name = get_current_project_name() - asset_name = get_current_asset_name() - asset = get_asset_by_name(project_name, asset_name) + folder_path = get_current_folder_path() + task_name = get_current_task_name() - frame_start = asset["data"].get("frameStart") - frame_end = asset["data"].get("frameEnd") + folder_entity = ayon_api.get_folder_by_path( + project_name, + folder_path, + fields={"id"}) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + + task_attributes = task_entity["attrib"] + + frame_start = task_attributes.get("frameStart") + frame_end = task_attributes.get("frameEnd") if frame_start is None or frame_end is None: - cmds.warning("No edit information found for %s" % asset_name) + cmds.warning("No edit information found for '{}'".format(folder_path)) return - handle_start = asset["data"].get("handleStart") or 0 - handle_end = asset["data"].get("handleEnd") or 0 + handle_start = task_attributes.get("handleStart") or 0 + handle_end = task_attributes.get("handleEnd") or 0 frame_range = { "frameStart": frame_start, @@ -2553,17 +2681,19 @@ def get_frame_range(include_animation_range=False): # Some usages of this function use the full dictionary to define # instance attributes for which we want to exclude the animation # keys. That is why these are excluded by default. - task_name = get_current_task_name() + settings = get_project_settings(project_name) + + task_type = task_entity["taskType"] + include_handles_settings = settings["maya"]["include_handles"] - current_task = asset.get("data").get("tasks").get(task_name) animation_start = frame_start animation_end = frame_end include_handles = include_handles_settings["include_handles_default"] for item in include_handles_settings["per_task_type"]: - if current_task["type"] in item["task_type"]: + if task_type in item["task_type"]: include_handles = item["include_handles"] break if include_handles: @@ -2577,7 +2707,7 @@ def get_frame_range(include_animation_range=False): def reset_frame_range(playback=True, render=True, fps=True): - """Set frame range to current asset + """Set frame range to current folder. Args: playback (bool, Optional): Whether to set the maya timeline playback @@ -2591,7 +2721,7 @@ def reset_frame_range(playback=True, render=True, fps=True): frame_range = get_frame_range(include_animation_range=True) if not frame_range: - # No frame range data found for asset + # No frame range data found for folder return frame_start = frame_range["frameStart"] @@ -2616,57 +2746,131 @@ def reset_frame_range(playback=True, render=True, fps=True): def reset_scene_resolution(): """Apply the scene resolution from the project definition - scene resolution can be overwritten by an asset if the asset.data contains - any information regarding scene resolution . + The scene resolution will be retrieved from the current task entity's + attributes. 
    Returns:
        None
    """
-    project_name = get_current_project_name()
-    project_doc = get_project(project_name)
-    project_data = project_doc["data"]
-    asset_data = get_current_project_asset()["data"]
+    task_attributes = get_current_task_entity(fields={"attrib"})["attrib"]

-    # Set project resolution
-    width_key = "resolutionWidth"
-    height_key = "resolutionHeight"
-    pixelAspect_key = "pixelAspect"
+    # Set resolution
+    width = task_attributes.get("resolutionWidth", 1920)
+    height = task_attributes.get("resolutionHeight", 1080)
+    pixel_aspect = task_attributes.get("pixelAspect", 1)

-    width = asset_data.get(width_key, project_data.get(width_key, 1920))
-    height = asset_data.get(height_key, project_data.get(height_key, 1080))
-    pixelAspect = asset_data.get(pixelAspect_key,
-                                 project_data.get(pixelAspect_key, 1))
-
-    set_scene_resolution(width, height, pixelAspect)
+    set_scene_resolution(width, height, pixel_aspect)


-def set_context_settings():
+def set_context_settings(
+    fps=True,
+    resolution=True,
+    frame_range=True,
+    colorspace=True
+):
     """Apply the project settings from the project definition

-    Settings can be overwritten by an asset if the asset.data contains
-    any information regarding those settings.
+    Settings are applied from the current context's folder and task
+    attributes and the project settings.

-    Examples of settings:
-        fps
-        resolution
-        renderer
+    Args:
+        fps (bool): Whether to set the scene FPS.
+        resolution (bool): Whether to set the render resolution.
+        frame_range (bool): Whether to reset the time slider frame ranges.
+        colorspace (bool): Whether to reset the colorspace.

     Returns:
         None
+
     """
+    if fps:
+        # Set project fps
+        set_scene_fps(get_fps_for_current_context())

-
-    # Set project fps
-    set_scene_fps(get_fps_for_current_context())
-
-    reset_scene_resolution()
+    if resolution:
+        reset_scene_resolution()

     # Set frame range.
-    reset_frame_range()
+    if frame_range:
+        reset_frame_range(fps=False)

     # Set colorspace
-    set_colorspace()
+    if colorspace:
+        set_colorspace()
+
+
+def prompt_reset_context():
+    """Prompt the user what context settings to reset.
+
+    This prompt is used on saving to a different task to allow the scene to
+    get matched to the new context.
+    """
+    # TODO: Cleanup this prototyped mess of imports and odd dialog
+    from ayon_core.tools.attribute_defs.dialog import (
+        AttributeDefinitionsDialog
+    )
+    from ayon_core.style import load_stylesheet
+    from ayon_core.lib import BoolDef, UILabelDef
+
+    definitions = [
+        UILabelDef(
+            label=(
+                "You are saving your workfile into a different folder or task."
+ "\n\n" + "Would you like to update some settings to the new context?\n" + ) + ), + BoolDef( + "fps", + label="FPS", + tooltip="Reset workfile FPS", + default=True + ), + BoolDef( + "frame_range", + label="Frame Range", + tooltip="Reset workfile start and end frame ranges", + default=True + ), + BoolDef( + "resolution", + label="Resolution", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "colorspace", + label="Colorspace", + tooltip="Reset workfile resolution", + default=True + ), + BoolDef( + "instances", + label="Publish instances", + tooltip="Update all publish instance's folder and task to match " + "the new folder and task", + default=True + ), + ] + + dialog = AttributeDefinitionsDialog(definitions) + dialog.setWindowTitle("Saving to different context.") + dialog.setStyleSheet(load_stylesheet()) + if not dialog.exec_(): + return None + + options = dialog.get_values() + with suspended_refresh(): + set_context_settings( + fps=options["fps"], + resolution=options["resolution"], + frame_range=options["frame_range"], + colorspace=options["colorspace"] + ) + if options["instances"]: + update_content_on_context_change() + + dialog.deleteLater() # Valid FPS @@ -2926,13 +3130,13 @@ def bake_to_world_space(nodes, def load_capture_preset(data): - """Convert OpenPype Extract Playblast settings to `capture` arguments + """Convert AYON Extract Playblast settings to `capture` arguments Input data is the settings from: `project_settings/maya/publish/ExtractPlayblast/capture_preset` Args: - data (dict): Capture preset settings from OpenPype settings + data (dict): Capture preset settings from AYON settings Returns: dict: `capture.capture` compatible keyword arguments @@ -3042,7 +3246,7 @@ def load_capture_preset(data): return options -def get_attr_in_layer(attr, layer): +def get_attr_in_layer(attr, layer, as_string=True): """Return attribute value in specified renderlayer. Same as cmds.getAttr but this gets the attribute's value in a @@ -3060,6 +3264,7 @@ def get_attr_in_layer(attr, layer): Args: attr (str): attribute name, ex. "node.attribute" layer (str): layer name + as_string (bool): whether attribute should convert to a string value Returns: The return value from `maya.cmds.getAttr` @@ -3069,7 +3274,8 @@ def get_attr_in_layer(attr, layer): try: if cmds.mayaHasRenderSetup(): from . 
@@ -3069,7 +3274,8 @@
     try:
         if cmds.mayaHasRenderSetup():
             from . import lib_rendersetup
-            return lib_rendersetup.get_attr_in_layer(attr, layer)
+            return lib_rendersetup.get_attr_in_layer(
+                attr, layer, as_string=as_string)
     except AttributeError:
         pass

@@ -3077,7 +3283,7 @@
     current_layer = cmds.editRenderLayerGlobals(query=True,
                                                 currentRenderLayer=True)
     if layer == current_layer:
-        return cmds.getAttr(attr)
+        return cmds.getAttr(attr, asString=as_string)

     connections = cmds.listConnections(attr,
                                        plugs=True,
@@ -3128,7 +3334,7 @@
                 value *= conversion
             return value

-    return cmds.getAttr(attr)
+    return cmds.getAttr(attr, asString=as_string)


 def fix_incompatible_containers():
@@ -3157,33 +3363,46 @@ def update_content_on_context_change():
     """
     This will update scene content to match new folder on context change
     """
-    scene_sets = cmds.listSets(allSets=True)
-    asset_doc = get_current_project_asset()
-    new_folder_path = get_asset_name_identifier(asset_doc)
-    new_data = asset_doc["data"]
-    for s in scene_sets:
-        try:
-            if cmds.getAttr("{}.id".format(s)) in {
-                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
-            }:
-                attr = cmds.listAttr(s)
-                print(s)
-                if "folderPath" in attr:
-                    print(
-                        " - setting folder to: [ {} ]".format(new_folder_path)
-                    )
-                    cmds.setAttr(
-                        "{}.folderPath".format(s),
-                        new_folder_path, type="string"
-                    )
-                if "frameStart" in attr:
-                    cmds.setAttr("{}.frameStart".format(s),
-                                 new_data["frameStart"])
-                if "frameEnd" in attr:
-                    cmds.setAttr("{}.frameEnd".format(s),
-                                 new_data["frameEnd"],)
-        except ValueError:
-            pass
+
+    host = registered_host()
+    create_context = CreateContext(host)
+    task_entity = get_current_task_entity(fields={"attrib"})
+
+    instance_values = {
+        "folderPath": create_context.get_current_folder_path(),
+        "task": create_context.get_current_task_name(),
+    }
+    creator_attribute_values = {
+        "frameStart": task_entity["attrib"]["frameStart"],
+        "frameEnd": task_entity["attrib"]["frameEnd"],
+    }
+
+    has_changes = False
+    for instance in create_context.instances:
+        for key, value in instance_values.items():
+            if key not in instance or instance[key] == value:
+                continue
+
+            # Update instance value
+            print(f"Updating {instance.product_name} {key} to: {value}")
+            instance[key] = value
+            has_changes = True
+
+        creator_attributes = instance.creator_attributes
+        for key, value in creator_attribute_values.items():
+            if (
+                key not in creator_attributes
+                or creator_attributes[key] == value
+            ):
+                continue
+
+            # Update instance creator attribute value
+            print(f"Updating {instance.product_name} {key} to: {value}")
+            creator_attributes[key] = value
+            has_changes = True
+
+    if has_changes:
+        create_context.save_changes()


 def show_message(title, msg):
@@ -3283,7 +3502,7 @@ def set_colorspace():
     else:
         # TODO: deprecated code from 3.15.5 - remove
         # Maya 2022+ introduces new OCIO v2 color management settings that
-        # can override the old color management preferences. OpenPype has
+        # can override the old color management preferences. AYON has
         # separate settings for both so we fall back when necessary.
         use_ocio_v2 = imageio["colorManagementPreference_v2"]["enabled"]
         if use_ocio_v2 and not ocio_v2_support:
@@ -3829,7 +4048,7 @@ def get_color_management_output_transform():


 def image_info(file_path):
     # type: (str) -> dict
-    """Based on tha texture path, get its bit depth and format information.
+    """Based on the texture path, get its bit depth and format information.
Take reference from makeTx.py in Arnold: ImageInfo(filename): Get Image Information for colorspace AiTextureGetFormat(filename): Get Texture Format @@ -3917,17 +4136,26 @@ def len_flattened(components): return n -def get_all_children(nodes): +def get_all_children(nodes, ignore_intermediate_objects=False): """Return all children of `nodes` including each instanced child. Using maya.cmds.listRelatives(allDescendents=True) includes only the first instance. As such, this function acts as an optimal replacement with a focus on a fast query. + Args: + nodes (iterable): List of nodes to get children for. + ignore_intermediate_objects (bool): Ignore any children that + are intermediate objects. + + Returns: + set: Children of input nodes. + """ sel = OpenMaya.MSelectionList() traversed = set() iterator = OpenMaya.MItDag(OpenMaya.MItDag.kDepthFirst) + fn_dag = OpenMaya.MFnDagNode() for node in nodes: if node in traversed: @@ -3944,6 +4172,13 @@ def get_all_children(nodes): iterator.next() # noqa: B305 while not iterator.isDone(): + if ignore_intermediate_objects: + fn_dag.setObject(iterator.currentItem()) + if fn_dag.isIntermediateObject: + iterator.prune() + iterator.next() # noqa: B305 + continue + path = iterator.fullPathName() if path in traversed: @@ -3954,7 +4189,7 @@ def get_all_children(nodes): traversed.add(path) iterator.next() # noqa: B305 - return list(traversed) + return traversed def get_capture_preset( @@ -3968,7 +4203,7 @@ def get_capture_preset( Args: task_name (str): Task name. task_type (str): Task type. - product_name (str): Subset name. + product_name (str): Product name. project_settings (dict): Project settings. log (logging.Logger): Logging object. """ @@ -4035,6 +4270,9 @@ def get_reference_node(members, log=None): if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"): continue + if not is_valid_reference_node(ref): + continue + references.add(ref) assert references, "No reference node found in container" @@ -4065,15 +4303,19 @@ def get_reference_node_parents(ref): list: The upstream parent reference nodes. """ - parent = cmds.referenceQuery(ref, - referenceNode=True, - parent=True) + def _get_parent(reference_node): + """Return parent reference node, but ignore invalid reference nodes""" + if not is_valid_reference_node(reference_node): + return + return cmds.referenceQuery(reference_node, + referenceNode=True, + parent=True) + + parent = _get_parent(ref) parents = [] while parent: parents.append(parent) - parent = cmds.referenceQuery(parent, - referenceNode=True, - parent=True) + parent = _get_parent(parent) return parents @@ -4119,15 +4361,24 @@ def create_rig_animation_instance( ) assert roots, "No root nodes in rig, this is a bug." 
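+    # The template data below provides both the current {folder[name]},
+    # {product[name]} and {product[type]} keys and the legacy {asset},
+    # {subset} and {family} keys, so older product name templates keep
+    # formatting correctly.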
+ folder_entity = context["folder"] + product_entity = context["product"] + product_type = product_entity["productType"] + product_name = product_entity["name"] + custom_product_name = options.get("animationProductName") if custom_product_name: formatting_data = { - "asset": context["asset"], - "subset": context['subset']['name'], - "family": ( - context['subset']['data'].get('family') or - context['subset']['data']['families'][0] - ) + "folder": { + "name": folder_entity["name"] + }, + "product": { + "type": product_type, + "name": product_name, + }, + "asset": folder_entity["name"], + "subset": product_name, + "family": product_type } namespace = get_custom_namespace( custom_product_name.format(**formatting_data) diff --git a/client/ayon_core/hosts/maya/api/lib_renderproducts.py b/client/ayon_core/hosts/maya/api/lib_renderproducts.py index 7f26145e1d..52c282c6de 100644 --- a/client/ayon_core/hosts/maya/api/lib_renderproducts.py +++ b/client/ayon_core/hosts/maya/api/lib_renderproducts.py @@ -297,7 +297,7 @@ class ARenderProducts: """ return self._get_attr("defaultRenderGlobals", attribute) - def _get_attr(self, node_attr, attribute=None): + def _get_attr(self, node_attr, attribute=None, as_string=True): """Return the value of the attribute in the renderlayer For readability this allows passing in the attribute in two ways. @@ -317,7 +317,7 @@ class ARenderProducts: else: plug = "{}.{}".format(node_attr, attribute) - return lib.get_attr_in_layer(plug, layer=self.layer) + return lib.get_attr_in_layer(plug, layer=self.layer, as_string=as_string) @staticmethod def extract_separator(file_prefix): @@ -720,7 +720,8 @@ class RenderProductsArnold(ARenderProducts): # AOVs > Legacy > Maya Render View > Mode aovs_enabled = bool( - self._get_attr("defaultArnoldRenderOptions.aovMode") + self._get_attr( + "defaultArnoldRenderOptions.aovMode", as_string=False) ) if not aovs_enabled: return beauty_products @@ -1133,9 +1134,24 @@ class RenderProductsRedshift(ARenderProducts): aovs = list(set(aovs) - set(ref_aovs)) products = [] + global_aov_enabled = bool( + self._get_attr("redshiftOptions.aovGlobalEnableMode", as_string=False) + ) + colorspace = lib.get_color_management_output_transform() + if not global_aov_enabled: + # only beauty output + for camera in cameras: + products.insert(0, + RenderProduct(productName="", + ext=ext, + multipart=self.multipart, + camera=camera, + colorspace=colorspace)) + return products + light_groups_enabled = False has_beauty_aov = False - colorspace = lib.get_color_management_output_transform() + for aov in aovs: enabled = self._get_attr(aov, "enabled") if not enabled: diff --git a/client/ayon_core/hosts/maya/api/lib_rendersettings.py b/client/ayon_core/hosts/maya/api/lib_rendersettings.py index b8a4d04a10..f9e243146a 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersettings.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersettings.py @@ -7,7 +7,7 @@ from ayon_core.lib import Logger from ayon_core.settings import get_project_settings from ayon_core.pipeline import CreatorError, get_current_project_name -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.hosts.maya.api.lib import reset_frame_range @@ -77,7 +77,8 @@ class RenderSettings(object): renderer = cmds.getAttr( 'defaultRenderGlobals.currentRenderer').lower() - asset_doc = get_current_project_asset() + folder_entity = get_current_folder_entity() + folder_attributes = folder_entity["attrib"] # 
project_settings/maya/create/CreateRender/aov_separator try: aov_separator = self._aov_chars[( @@ -101,8 +102,8 @@ class RenderSettings(object): else: print("{0} isn't a supported renderer to autoset settings.".format(renderer)) # noqa # TODO: handle not having res values in the doc - width = asset_doc["data"].get("resolutionWidth") - height = asset_doc["data"].get("resolutionHeight") + width = folder_attributes.get("resolutionWidth") + height = folder_attributes.get("resolutionHeight") if renderer == "arnold": # set renderer settings for Arnold from project settings diff --git a/client/ayon_core/hosts/maya/api/lib_rendersetup.py b/client/ayon_core/hosts/maya/api/lib_rendersetup.py index fb6dd13ce0..6dca8eb6dd 100644 --- a/client/ayon_core/hosts/maya/api/lib_rendersetup.py +++ b/client/ayon_core/hosts/maya/api/lib_rendersetup.py @@ -3,7 +3,7 @@ https://github.com/Colorbleed/colorbleed-config/blob/acre/colorbleed/maya/lib_rendersetup.py Credits: Roy Nieterau (BigRoy) / Colorbleed -Modified for use in OpenPype +Modified for use in AYON """ @@ -77,7 +77,7 @@ def get_rendersetup_layer(layer): if conn.endswith(".legacyRenderLayer")), None) -def get_attr_in_layer(node_attr, layer): +def get_attr_in_layer(node_attr, layer, as_string=True): """Return attribute value in Render Setup layer. This will only work for attributes which can be @@ -124,7 +124,7 @@ def get_attr_in_layer(node_attr, layer): node = history_overrides[-1] if history_overrides else override node_attr_ = node + ".original" - return get_attribute(node_attr_, asString=True) + return get_attribute(node_attr_, asString=as_string) layer = get_rendersetup_layer(layer) rs = renderSetup.instance() @@ -144,7 +144,7 @@ def get_attr_in_layer(node_attr, layer): # we will let it error out. rs.switchToLayer(current_layer) - return get_attribute(node_attr, asString=True) + return get_attribute(node_attr, asString=as_string) overrides = get_attr_overrides(node_attr, layer) default_layer_value = get_default_layer_value(node_attr) diff --git a/client/ayon_core/hosts/maya/api/menu.py b/client/ayon_core/hosts/maya/api/menu.py index 70347e91b6..e3ef50cdc0 100644 --- a/client/ayon_core/hosts/maya/api/menu.py +++ b/client/ayon_core/hosts/maya/api/menu.py @@ -1,4 +1,5 @@ import os +import json import logging from functools import partial @@ -8,7 +9,7 @@ import maya.utils import maya.cmds as cmds from ayon_core.pipeline import ( - get_current_asset_name, + get_current_folder_path, get_current_task_name, registered_host ) @@ -43,14 +44,14 @@ def _get_menu(menu_name=None): def get_context_label(): return "{}, {}".format( - get_current_asset_name(), + get_current_folder_path(), get_current_task_name() ) def install(project_settings): if cmds.about(batch=True): - log.info("Skipping openpype.menu initialization in batch mode..") + log.info("Skipping AYON menu initialization in batch mode..") return def add_menu(): @@ -214,8 +215,18 @@ def install(project_settings): ) return - config = project_settings["maya"]["scriptsmenu"]["definition"] - _menu = project_settings["maya"]["scriptsmenu"]["name"] + menu_settings = project_settings["maya"]["scriptsmenu"] + menu_name = menu_settings["name"] + config = menu_settings["definition"] + + if menu_settings.get("definition_type") == "definition_json": + data = menu_settings["definition_json"] + try: + config = json.loads(data) + except json.JSONDecodeError as exc: + print("Skipping studio menu, error decoding JSON definition.") + log.error(exc) + return if not config: log.warning("Skipping studio menu, no definition 
found.") @@ -223,8 +234,8 @@ def install(project_settings): # run the launcher for Maya menu studio_menu = launchformaya.main( - title=_menu.title(), - objectName=_menu.title().lower().replace(" ", "_") + title=menu_name.title(), + objectName=menu_name.title().lower().replace(" ", "_") ) # apply configuration @@ -261,7 +272,7 @@ def popup(): def update_menu_task_label(): - """Update the task label in Avalon menu to current session""" + """Update the task label in AYON menu to current session""" if IS_HEADLESS: return diff --git a/client/ayon_core/hosts/maya/api/pipeline.py b/client/ayon_core/hosts/maya/api/pipeline.py index 90fb2e5888..864a0c1599 100644 --- a/client/ayon_core/hosts/maya/api/pipeline.py +++ b/client/ayon_core/hosts/maya/api/pipeline.py @@ -67,6 +67,9 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") AVALON_CONTAINERS = ":AVALON_CONTAINERS" +# Track whether the workfile tool is about to save +_about_to_save = False + class MayaHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "maya" @@ -361,13 +364,13 @@ def parse_container(container): def _ls(): - """Yields Avalon container node names. + """Yields AYON container node names. Used by `ls()` to retrieve the nodes and then query the full container's data. Yields: - str: Avalon container node name (objectSet) + str: AYON container node name (objectSet) """ @@ -384,7 +387,7 @@ def _ls(): } # Iterate over all 'set' nodes in the scene to detect whether - # they have the avalon container ".id" attribute. + # they have the ayon container ".id" attribute. fn_dep = om.MFnDependencyNode() iterator = om.MItDependencyNodes(om.MFn.kSet) for mobject in _maya_iterate(iterator): @@ -449,7 +452,7 @@ def containerise(name, ("name", name), ("namespace", namespace), ("loader", loader), - ("representation", context["representation"]["_id"]), + ("representation", context["representation"]["id"]), ] for key, value in data: @@ -577,10 +580,15 @@ def on_save(): _remove_workfile_lock() # Generate ids of the current context on nodes in the scene - nodes = lib.get_id_required_nodes(referenced_nodes=False) + nodes = lib.get_id_required_nodes(referenced_nodes=False, + existing_ids=False) for node, new_id in lib.generate_ids(nodes): lib.set_id(node, new_id, overwrite=False) + # We are now starting the actual save directly + global _about_to_save + _about_to_save = False + def on_open(): """On scene open let's assume the containers have changed.""" @@ -588,7 +596,7 @@ def on_open(): from ayon_core.tools.utils import SimplePopup # Validate FPS after update_task_from_path to - # ensure it is using correct FPS for the asset + # ensure it is using correct FPS for the folder lib.validate_fps() lib.fix_incompatible_containers() @@ -646,9 +654,10 @@ def on_task_changed(): "Can't set project for new context because path does not exist: {}" ).format(workdir)) - with lib.suspended_refresh(): - lib.set_context_settings() - lib.update_content_on_context_change() + global _about_to_save + if not lib.IS_HEADLESS and _about_to_save: + # Let's prompt the user to update the context settings or not + lib.prompt_reset_context() def before_workfile_open(): @@ -664,6 +673,9 @@ def before_workfile_save(event): if workdir_path: create_workspace_mel(workdir_path, project_name) + global _about_to_save + _about_to_save = True + def workfile_save_before_xgen(event): """Manage Xgen external files when switching context. @@ -673,7 +685,7 @@ def workfile_save_before_xgen(event): switching context. 
Args: - event (Event) - openpype/lib/events.py + event (Event) - ayon_core/lib/events.py """ if not cmds.pluginInfo("xgenToolkit", query=True, loaded=True): return diff --git a/client/ayon_core/hosts/maya/api/plugin.py b/client/ayon_core/hosts/maya/api/plugin.py index 1d72353116..6f8b74c906 100644 --- a/client/ayon_core/hosts/maya/api/plugin.py +++ b/client/ayon_core/hosts/maya/api/plugin.py @@ -4,6 +4,7 @@ from abc import ABCMeta import qargparse import six +import ayon_api from maya import cmds from maya.app.renderSetup.model import renderSetup @@ -28,7 +29,6 @@ from ayon_core.pipeline import ( get_current_project_name, ) from ayon_core.pipeline.load import LoadError -from ayon_core.client import get_asset_by_name from ayon_core.pipeline.create import get_product_name from . import lib @@ -454,17 +454,23 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): # this instance will not have the `instance_node` data yet # until it's been saved/persisted at least once. project_name = self.create_context.get_current_project_name() - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() + task_name = self.create_context.get_current_task_name() instance_data = { - "folderPath": asset_name, - "task": self.create_context.get_current_task_name(), + "folderPath": folder_path, + "task": task_name, "variant": layer.name(), } - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - instance_data["task"], + folder_entity, + task_entity, layer.name(), host_name, ) @@ -578,8 +584,8 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): def get_product_name( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name=None, instance=None @@ -587,13 +593,22 @@ class RenderlayerCreator(NewCreator, MayaCreatorBase): if host_name is None: host_name = self.create_context.host_name dynamic_data = self.get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] # creator.product_type != 'render' as expected return get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, self.layer_instance_prefix or self.product_type, variant, @@ -668,21 +683,19 @@ class Loader(LoaderPlugin): self.log.debug("No custom group_name, no group will be created.") options["attach_to_root"] = False - asset_doc = context["asset"] - subset_doc = context["subset"] - product_type = ( - subset_doc["data"].get("family") - or subset_doc["data"]["families"][0] - ) + folder_entity = context["folder"] + product_entity = context["product"] + product_name = product_entity["name"] + product_type = product_entity["productType"] formatting_data = { - "asset_name": asset_doc["name"], - "asset_type": asset_doc["type"], + "asset_name": folder_entity["name"], + "asset_type": "asset", "folder": { - "name": asset_doc["name"], + "name": folder_entity["name"], }, - "subset": subset_doc["name"], + "subset": product_name, "product": { - "name": subset_doc["name"], + "name": product_name, "type": product_type, }, "family": product_type @@ -793,14 +806,17 @@ class 
ReferenceLoader(Loader): """To be implemented by subclass""" raise NotImplementedError("Must be implemented by subclass") - def update(self, container, representation): + def update(self, container, context): from maya import cmds from ayon_core.hosts.maya.api.lib import get_container_members node = container["objectName"] - path = get_representation_path(representation) + project_name = context["project"]["name"] + repre_entity = context["representation"] + + path = get_representation_path(repre_entity) # Get reference node from container members members = get_container_members(node) @@ -813,9 +829,9 @@ class ReferenceLoader(Loader): "abc": "Alembic", "fbx": "FBX", "usd": "USD Import" - }.get(representation["name"]) + }.get(repre_entity["name"]) - assert file_type, "Unsupported representation: %s" % representation + assert file_type, "Unsupported representation: %s" % repre_entity assert os.path.exists(path), "%s does not exist." % path @@ -823,7 +839,7 @@ class ReferenceLoader(Loader): # them to incoming data. alembic_attrs = ["speed", "offset", "cycleType", "time"] alembic_data = {} - if representation["name"] == "abc": + if repre_entity["name"] == "abc": alembic_nodes = cmds.ls( "{}:*".format(namespace), type="AlembicNode" ) @@ -840,10 +856,7 @@ class ReferenceLoader(Loader): self.log.debug("No alembic nodes found in {}".format(members)) try: - path = self.prepare_root_value(path, - representation["context"] - ["project"] - ["name"]) + path = self.prepare_root_value(path, project_name) content = cmds.file(path, loadReference=reference_node, type=file_type, @@ -867,7 +880,7 @@ class ReferenceLoader(Loader): self._organize_containers(content, container["objectName"]) # Reapply alembic settings. - if representation["name"] == "abc" and alembic_data: + if repre_entity["name"] == "abc" and alembic_data: alembic_nodes = cmds.ls( "{}:*".format(namespace), type="AlembicNode" ) @@ -886,7 +899,7 @@ class ReferenceLoader(Loader): cmds.disconnectAttr(input, node_attr) cmds.setAttr(node_attr, data["value"]) - # Fix PLN-40 for older containers created with Avalon that had the + # Fix PLN-40 for older containers created with AYON that had the # `.verticesOnlySet` set to True. 
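+            # (When that flag is enabled the objectSet only accepts
+            # vertex/component members, so regular nodes cannot be added.)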
if cmds.getAttr("{}.verticesOnlySet".format(node)): self.log.info("Setting %s.verticesOnlySet to False", node) @@ -901,7 +914,7 @@ class ReferenceLoader(Loader): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") # When an animation or pointcache gets connected to an Xgen container, diff --git a/client/ayon_core/hosts/maya/api/render_setup_tools.py b/client/ayon_core/hosts/maya/api/render_setup_tools.py index a6b46e1e9a..9b00b53eee 100644 --- a/client/ayon_core/hosts/maya/api/render_setup_tools.py +++ b/client/ayon_core/hosts/maya/api/render_setup_tools.py @@ -5,7 +5,7 @@ Export Maya nodes from Render Setup layer as if flattened in that layer instead of exporting the defaultRenderLayer as Maya forces by default Credits: Roy Nieterau (BigRoy) / Colorbleed -Modified for use in OpenPype +Modified for use in AYON """ @@ -19,7 +19,7 @@ from .lib import pairwise @contextlib.contextmanager -def _allow_export_from_render_setup_layer(): +def allow_export_from_render_setup_layer(): """Context manager to override Maya settings to allow RS layer export""" try: @@ -102,7 +102,7 @@ def export_in_rs_layer(path, nodes, export=None): cmds.disconnectAttr(src, dest) # Export Selected - with _allow_export_from_render_setup_layer(): + with allow_export_from_render_setup_layer(): cmds.select(nodes, noExpand=True) if export: export() diff --git a/client/ayon_core/hosts/maya/api/setdress.py b/client/ayon_core/hosts/maya/api/setdress.py index 8d09716bf6..b1d5beb343 100644 --- a/client/ayon_core/hosts/maya/api/setdress.py +++ b/client/ayon_core/hosts/maya/api/setdress.py @@ -6,16 +6,10 @@ import contextlib import copy import six +import ayon_api from maya import cmds -from ayon_core.client import ( - get_version_by_name, - get_last_version_by_subset_id, - get_representation_by_id, - get_representation_by_name, - get_representation_parents, -) from ayon_core.pipeline import ( schema, discover_loader_plugins, @@ -156,7 +150,7 @@ def load_package(filepath, name, namespace=None): containers.append(container) # TODO: Do we want to cripple? Or do we want to add a 'parent' parameter? - # Cripple the original avalon containers so they don't show up in the + # Cripple the original AYON containers so they don't show up in the # manager # for container in containers: # cmds.setAttr("%s.id" % container, @@ -181,7 +175,7 @@ def _add(instance, representation_id, loaders, namespace, root="|"): namespace (str): Returns: - str: The created Avalon container. + str: The created AYON container. """ @@ -250,7 +244,7 @@ def _instances_by_namespace(data): def get_contained_containers(container): - """Get the Avalon containers in this container + """Get the AYON containers in this container Args: container (dict): The container dict. 
@@ -262,7 +256,7 @@

     from .pipeline import parse_container

-    # Get avalon containers in this package setdress container
+    # Get AYON containers in this package setdress container
     containers = []
     members = cmds.sets(container['objectName'], query=True)
     for node in cmds.ls(members, type="objectSet"):
@@ -290,41 +284,54 @@ def update_package_version(container, version):

     # Versioning (from `core.maya.pipeline`)
     project_name = get_current_project_name()
-    current_representation = get_representation_by_id(
-        project_name, container["representation"]
+    repre_id = container["representation"]
+    current_representation = ayon_api.get_representation_by_id(
+        project_name, repre_id
     )

     assert current_representation is not None, "This is a bug"

-    version_doc, subset_doc, asset_doc, project_doc = (
-        get_representation_parents(project_name, current_representation)
-    )
+    (
+        version_entity,
+        product_entity,
+        folder_entity,
+        project_entity
+    ) = ayon_api.get_representation_parents(project_name, repre_id)

     if version == -1:
-        new_version = get_last_version_by_subset_id(
-            project_name, subset_doc["_id"]
+        new_version = ayon_api.get_last_version_by_product_id(
+            project_name, product_entity["id"]
         )
     else:
-        new_version = get_version_by_name(
-            project_name, version, subset_doc["_id"]
+        new_version = ayon_api.get_version_by_name(
+            project_name, version, product_entity["id"]
         )

-    assert new_version is not None, "This is a bug"
+    if new_version is None:
+        raise ValueError("Version not found: {}".format(version))

     # Get the new representation (new file)
-    new_representation = get_representation_by_name(
-        project_name, current_representation["name"], new_version["_id"]
+    new_representation = ayon_api.get_representation_by_name(
+        project_name, current_representation["name"], new_version["id"]
     )
-
-    update_package(container, new_representation)
+    # TODO there is 'get_representation_context' to get the context which
+    #   could be possible to use here
+    new_context = {
+        "project": project_entity,
+        "folder": folder_entity,
+        "product": product_entity,
+        "version": version_entity,
+        "representation": new_representation,
+    }
+    update_package(container, new_context)


-def update_package(set_container, representation):
+def update_package(set_container, context):
     """Update any matrix changes in the scene based on the new data

     Args:
         set_container (dict): container data from `ls()`
-        representation (dict): the representation document from the database
+        context (dict): full entity context with "project", "folder",
+            "product", "version" and "representation" entities.

     Returns:
         None

@@ -332,8 +339,9 @@
     """

     # Load the original package data
-    project_name = get_current_project_name()
-    current_representation = get_representation_by_id(
+    project_name = context["project"]["name"]
+    repre_entity = context["representation"]
+    current_representation = ayon_api.get_representation_by_id(
         project_name, set_container["representation"]
     )

@@ -343,7 +351,7 @@
         current_data = json.load(fp)

     # Load the new package data
-    new_file = get_representation_path(representation)
+    new_file = get_representation_path(repre_entity)
     assert new_file.endswith(".json")
     with open(new_file, "r") as fp:
         new_data = json.load(fp)

@@ -354,7 +362,7 @@

     # TODO: This should be handled by the pipeline itself
     cmds.setAttr(set_container['objectName'] + ".representation",
-                 str(representation['_id']), type="string")
+
context["representation"]["id"], type="string") def update_scene(set_container, containers, current_data, new_data, new_file): @@ -401,6 +409,8 @@ def update_scene(set_container, containers, current_data, new_data, new_file): new_lookup = _instances_by_namespace(new_data) old_lookup = _instances_by_namespace(current_data) + repre_ids = set() + containers_for_repre_compare = [] for container in containers: container_ns = container['namespace'] @@ -409,98 +419,121 @@ def update_scene(set_container, containers, current_data, new_data, new_file): processed_namespaces.add(container_ns) processed_containers.append(container['objectName']) - if container_ns in new_lookup: - root = get_container_transforms(container, root=True) - if not root: - log.error("Can't find root for %s", container['objectName']) - continue - - old_instance = old_lookup.get(container_ns, {}) - new_instance = new_lookup[container_ns] - - # Update the matrix - # check matrix against old_data matrix to find local overrides - current_matrix = cmds.xform(root, - query=True, - matrix=True, - objectSpace=True) - - original_matrix = old_instance.get("matrix", identity) - has_matrix_override = not matrix_equals(current_matrix, - original_matrix) - - if has_matrix_override: - log.warning("Matrix override preserved on %s", container_ns) - else: - new_matrix = new_instance.get("matrix", identity) - cmds.xform(root, matrix=new_matrix, objectSpace=True) - - # Update the parenting - if old_instance.get("parent", None) != new_instance["parent"]: - - parent = to_namespace(new_instance['parent'], set_namespace) - if not cmds.objExists(parent): - log.error("Can't find parent %s", parent) - continue - - # Set the new parent - cmds.lockNode(root, lock=False) - root = cmds.parent(root, parent, relative=True) - cmds.lockNode(root, lock=True) - - # Update the representation - representation_current = container['representation'] - representation_old = old_instance['representation'] - representation_new = new_instance['representation'] - has_representation_override = (representation_current != - representation_old) - - if representation_new != representation_current: - - if has_representation_override: - log.warning("Your scene had local representation " - "overrides within the set. New " - "representations not loaded for %s.", - container_ns) - continue - - # We check it against the current 'loader' in the scene instead - # of the original data of the package that was loaded because - # an Artist might have made scene local overrides - if new_instance['loader'] != container['loader']: - log.warning("Loader is switched - local edits will be " - "lost. Removing: %s", - container_ns) - - # Remove this from the "has been processed" list so it's - # considered as new element and added afterwards. - processed_containers.pop() - processed_namespaces.remove(container_ns) - remove_container(container) - continue - - # Check whether the conversion can be done by the Loader. - # They *must* use the same asset, product and Loader for - # `update_container` to make sense. - old = get_representation_by_id( - project_name, representation_current - ) - new = get_representation_by_id( - project_name, representation_new - ) - is_valid = compare_representations(old=old, new=new) - if not is_valid: - log.error("Skipping: %s. 
See log for details.", - container_ns) - continue - - new_version = new["context"]["version"] - update_container(container, version=new_version) - - else: + if container_ns not in new_lookup: # Remove this container because it's not in the new data log.warning("Removing content: %s", container_ns) remove_container(container) + continue + + root = get_container_transforms(container, root=True) + if not root: + log.error("Can't find root for %s", container['objectName']) + continue + + old_instance = old_lookup.get(container_ns, {}) + new_instance = new_lookup[container_ns] + + # Update the matrix + # check matrix against old_data matrix to find local overrides + current_matrix = cmds.xform(root, + query=True, + matrix=True, + objectSpace=True) + + original_matrix = old_instance.get("matrix", identity) + has_matrix_override = not matrix_equals(current_matrix, + original_matrix) + + if has_matrix_override: + log.warning("Matrix override preserved on %s", container_ns) + else: + new_matrix = new_instance.get("matrix", identity) + cmds.xform(root, matrix=new_matrix, objectSpace=True) + + # Update the parenting + if old_instance.get("parent", None) != new_instance["parent"]: + + parent = to_namespace(new_instance['parent'], set_namespace) + if not cmds.objExists(parent): + log.error("Can't find parent %s", parent) + continue + + # Set the new parent + cmds.lockNode(root, lock=False) + root = cmds.parent(root, parent, relative=True) + cmds.lockNode(root, lock=True) + + # Update the representation + representation_current = container['representation'] + representation_old = old_instance['representation'] + representation_new = new_instance['representation'] + has_representation_override = (representation_current != + representation_old) + + if representation_new == representation_current: + continue + + if has_representation_override: + log.warning("Your scene had local representation " + "overrides within the set. New " + "representations not loaded for %s.", + container_ns) + continue + + # We check it against the current 'loader' in the scene instead + # of the original data of the package that was loaded because + # an Artist might have made scene local overrides + if new_instance['loader'] != container['loader']: + log.warning("Loader is switched - local edits will be " + "lost. Removing: %s", + container_ns) + + # Remove this from the "has been processed" list so it's + # considered as new element and added afterwards. + processed_containers.pop() + processed_namespaces.remove(container_ns) + remove_container(container) + continue + + # Check whether the conversion can be done by the Loader. + # They *must* use the same folder, product and Loader for + # `update_container` to make sense. 
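+        # Collect the representation ids here so the entities and their
+        # parents can be fetched in two batched server calls after this
+        # loop instead of querying per container.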
+        repre_ids.add(representation_current)
+        repre_ids.add(representation_new)
+
+        containers_for_repre_compare.append(
+            (container, representation_current, representation_new)
+        )
+
+    repre_entities_by_id = {
+        repre_entity["id"]: repre_entity
+        for repre_entity in ayon_api.get_representations(
+            project_name, representation_ids=repre_ids
+        )
+    }
+    repre_parents_by_id = ayon_api.get_representations_parents(
+        project_name, repre_ids
+    )
+    for (
+        container,
+        repre_current_id,
+        repre_new_id
+    ) in containers_for_repre_compare:
+        current_repre = repre_entities_by_id[repre_current_id]
+        current_parents = repre_parents_by_id[repre_current_id]
+        new_repre = repre_entities_by_id[repre_new_id]
+        new_parents = repre_parents_by_id[repre_new_id]
+
+        is_valid = compare_representations(
+            current_repre, current_parents, new_repre, new_parents
+        )
+        if not is_valid:
+            log.error("Skipping: %s. See log for details.",
+                      container["namespace"])
+            continue
+
+        new_version = new_parents.version["version"]
+        update_container(container, version=new_version)

     # Add new assets
     all_loaders = discover_loader_plugins()
@@ -531,43 +564,43 @@
     return processed_containers


-def compare_representations(old, new):
-    """Check if the old representation given can be updated
+def compare_representations(
+    current_repre, current_parents, new_repre, new_parents
+):
+    """Check if the current representation can be updated to the new one.

     Due to limitations of the `update_container` function we cannot allow
     differences in the following data:
         * Representation name (extension)
-        * Asset name
-        * Subset name (variation)
+        * Folder id
+        * Product id

-    If any of those data values differs, the function will raise an
-    RuntimeError
+    If any of those values differ, the update is considered invalid and
+    the reason is logged.

     Args:
-        old(dict): representation data from the database
-        new(dict): representation data from the database
+        current_repre (dict[str, Any]): Current representation entity.
+        current_parents (RepresentationParents): Current
+            representation parents.
+        new_repre (dict[str, Any]): New representation entity.
+        new_parents (RepresentationParents): New representation parents.

     Returns:
-        bool: False if the representation is not invalid else True
-    """
+        bool: False if the update is invalid, True otherwise.
+
+    """
-    if new["name"] != old["name"]:
+    if current_repre["name"] != new_repre["name"]:
         log.error("Cannot switch extensions")
         return False

-    new_context = new["context"]
-    old_context = old["context"]
-    # TODO add better validation e.g. based on parent ids
-    if new_context["asset"] != old_context["asset"]:
-        log.error("Changing assets between updates is "
-                  "not supported.")
+    if current_parents.folder["id"] != new_parents.folder["id"]:
+        log.error("Changing folders between updates is not supported.")
         return False

-    if new_context["subset"] != old_context["subset"]:
-        log.error("Changing products between updates is "
-                  "not supported.")
+    if current_parents.product["id"] != new_parents.product["id"]:
+        log.error("Changing products between updates is not supported.")
         return False

     return True
diff --git a/client/ayon_core/hosts/maya/api/workfile_template_builder.py b/client/ayon_core/hosts/maya/api/workfile_template_builder.py
index 6ae2a075e3..75386d7e64 100644
--- a/client/ayon_core/hosts/maya/api/workfile_template_builder.py
+++ b/client/ayon_core/hosts/maya/api/workfile_template_builder.py
@@ -4,7 +4,7 @@ from maya import cmds

 from ayon_core.pipeline import (
     registered_host,
-    get_current_asset_name,
+    get_current_folder_path,
     AYON_INSTANCE_ID,
     AVALON_INSTANCE_ID,
 )
@@ -74,7 +74,7 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
             return True

         # update imported sets information
-        asset_name = get_current_asset_name()
+        folder_path = get_current_folder_path()
         for node in imported_sets:
             if not cmds.attributeQuery("id", node=node, exists=True):
                 continue
             if cmds.getAttr("{}.id".format(node)) not in {
                 AYON_INSTANCE_ID, AVALON_INSTANCE_ID
             }:
                 continue
-            if not cmds.attributeQuery("asset", node=node, exists=True):
+            if not cmds.attributeQuery("folderPath", node=node, exists=True):
                 continue

             cmds.setAttr(
-                "{}.asset".format(node), asset_name, type="string")
+                "{}.folderPath".format(node), folder_path, type="string")

         return True

@@ -286,7 +286,7 @@ class MayaPlaceholderLoadPlugin(PlaceholderPlugin, PlaceholderLoadMixin):
         if not container:
             return

-        roots = cmds.sets(container, q=True)
+        roots = cmds.sets(container, q=True) or []
         ref_node = None
         try:
             ref_node = get_reference_node(roots)
diff --git a/client/ayon_core/hosts/maya/api/yeti.py b/client/ayon_core/hosts/maya/api/yeti.py
new file mode 100644
index 0000000000..1526c3a2f3
--- /dev/null
+++ b/client/ayon_core/hosts/maya/api/yeti.py
@@ -0,0 +1,101 @@
+from typing import List
+
+from maya import cmds
+
+
+def get_yeti_user_variables(yeti_shape_node: str) -> List[str]:
+    """Get user defined yeti user variables for a `pgYetiMaya` shape node.
+
+    Arguments:
+        yeti_shape_node (str): The `pgYetiMaya` shape node.
+
+    Returns:
+        list: Attribute names (for a vector attribute it only lists the top
+            parent attribute, not the attribute per axis)
+    """
+
+    attrs = cmds.listAttr(yeti_shape_node,
+                          userDefined=True,
+                          string=("yetiVariableV_*",
+                                  "yetiVariableF_*")) or []
+    valid_attrs = []
+    for attr in attrs:
+        attr_type = cmds.attributeQuery(attr, node=yeti_shape_node,
+                                        attributeType=True)
+        if attr.startswith("yetiVariableV_") and attr_type == "double3":
+            # vector
+            valid_attrs.append(attr)
+        elif attr.startswith("yetiVariableF_") and attr_type == "double":
+            valid_attrs.append(attr)
+
+    return valid_attrs
+
+
+def create_yeti_variable(yeti_shape_node: str,
+                         attr_name: str,
+                         value=None,
+                         force_value: bool = False) -> bool:
+    """Create a yeti user variable on a `pgYetiMaya` shape node.
+
+    Arguments:
+        yeti_shape_node (str): The `pgYetiMaya` shape node.
+        attr_name (str): The fully qualified yeti variable name, e.g.
+ "yetiVariableF_myfloat" or "yetiVariableV_myvector" + value (object): The value to set (must match the type of the attribute) + When value is None it will ignored and not be set. + force_value (bool): Whether to set the value if the attribute already + exists or not. + + Returns: + bool: Whether the attribute value was set or not. + + """ + exists = cmds.attributeQuery(attr_name, node=yeti_shape_node, exists=True) + if not exists: + if attr_name.startswith("yetiVariableV_"): + _create_vector_yeti_user_variable(yeti_shape_node, attr_name) + if attr_name.startswith("yetiVariableF_"): + _create_float_yeti_user_variable(yeti_shape_node, attr_name) + + if value is not None and (not exists or force_value): + plug = "{}.{}".format(yeti_shape_node, attr_name) + if ( + isinstance(value, (list, tuple)) + and attr_name.startswith("yetiVariableV_") + ): + cmds.setAttr(plug, *value, type="double3") + else: + cmds.setAttr(plug, value) + + return True + return False + + +def _create_vector_yeti_user_variable(yeti_shape_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableV_"): + raise ValueError("Must start with yetiVariableV_") + cmds.addAttr(yeti_shape_node, + longName=attr_name, + attributeType="double3", + cachedInternally=True, + keyable=True) + for axis in "XYZ": + cmds.addAttr(yeti_shape_node, + longName="{}{}".format(attr_name, axis), + attributeType="double", + parent=attr_name, + cachedInternally=True, + keyable=True) + + +def _create_float_yeti_user_variable(yeti_node: str, attr_name: str): + if not attr_name.startswith("yetiVariableF_"): + raise ValueError("Must start with yetiVariableF_") + + cmds.addAttr(yeti_node, + longName=attr_name, + attributeType="double", + cachedInternally=True, + softMinValue=0, + softMaxValue=100, + keyable=True) diff --git a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py index ed294da125..45785ac354 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py +++ b/client/ayon_core/hosts/maya/hooks/pre_auto_load_plugins.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreAutoLoadPlugins(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py index 03ca8661bd..683b4c59c7 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py +++ b/client/ayon_core/hosts/maya/hooks/pre_copy_mel.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.maya.lib import create_workspace_mel @@ -11,11 +11,13 @@ class PreCopyMel(PreLaunchHook): launch_types = {LaunchTypes.local} def execute(self): - project_doc = self.data["project_doc"] + project_entity = self.data["project_entity"] workdir = self.launch_context.env.get("AYON_WORKDIR") if not workdir: self.log.warning("BUG: Workdir is not filled.") return project_settings = self.data["project_settings"] - create_workspace_mel(workdir, project_doc["name"], project_settings) + create_workspace_mel( + workdir, project_entity["name"], project_settings + ) diff --git a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py index 6bf678474f..a54f17c6c6 100644 --- a/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py +++ 
b/client/ayon_core/hosts/maya/hooks/pre_open_workfile_post_initialization.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class MayaPreOpenWorkfilePostInitialization(PreLaunchHook): diff --git a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py index b23c56fc5b..81cf9613b4 100644 --- a/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/maya/plugins/create/convert_legacy.py @@ -1,14 +1,14 @@ -from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin +import ayon_api + +from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin from ayon_core.hosts.maya.api import plugin from ayon_core.hosts.maya.api.lib import read -from ayon_core.client import get_asset_by_name - from maya import cmds from maya.app.renderSetup.model import renderSetup -class MayaLegacyConvertor(SubsetConvertorPlugin, +class MayaLegacyConvertor(ProductConvertorPlugin, plugin.MayaCreatorBase): """Find and convert any legacy products in the scene. @@ -19,7 +19,7 @@ class MayaLegacyConvertor(SubsetConvertorPlugin, Its limitation is that you can have multiple creators creating product of the same type and there is no way to handle it. This code should - nevertheless cover all creators that came with OpenPype. + nevertheless cover all creators that came with AYON. """ identifier = "io.openpype.creators.maya.legacy" @@ -83,7 +83,7 @@ class MayaLegacyConvertor(SubsetConvertorPlugin, ).format(product_type)) continue - creator_id = product_type_to_id[family] + creator_id = product_type_to_id[product_type] creator = self.create_context.creators[creator_id] data["creator_identifier"] = creator_id @@ -142,12 +142,21 @@ class MayaLegacyConvertor(SubsetConvertorPlugin, # recreate product name as without it would be # `renderingMain` vs correct `renderMain` project_name = self.create_context.get_current_project_name() - asset_doc = get_asset_by_name(project_name, - original_data["asset"]) + folder_entities = list(ayon_api.get_folders( + project_name, folder_names=[original_data["asset"]] + )) + if not folder_entities: + cmds.delete(instance_node) + continue + folder_entity = folder_entities[0] + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], data["task"] + ) + product_name = creator.get_product_name( project_name, - asset_doc, - data["task"], + folder_entity, + task_entity, original_data["variant"], ) original_data["productName"] = product_name diff --git a/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py b/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py index e7b903312f..7216236719 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_multishot_layout.py @@ -1,16 +1,18 @@ +import collections + from ayon_api import ( get_folder_by_name, get_folder_by_path, get_folders, + get_tasks, ) from maya import cmds # noqa: F401 -from ayon_core.client import get_assets from ayon_core.hosts.maya.api import plugin from ayon_core.lib import BoolDef, EnumDef, TextDef from ayon_core.pipeline import ( Creator, - get_current_asset_name, + get_current_folder_path, get_current_project_name, ) from ayon_core.pipeline.create import CreatorError @@ -39,13 +41,13 @@ class CreateMultishotLayout(plugin.MayaCreator): Todo: `get_folder_by_name` should be switched to 
`get_folder_by_path` once the fork to pure AYON is done. - Warning: this will not work for projects where the asset name + Warning: this will not work for projects where the folder name is not unique across the project until the switch mentioned above is done. """ project_name = get_current_project_name() - folder_path = get_current_asset_name() + folder_path = get_current_folder_path() if "/" in folder_path: current_folder = get_folder_by_path(project_name, folder_path) else: @@ -128,10 +130,18 @@ class CreateMultishotLayout(plugin.MayaCreator): raise CreatorError( f"Creator {layout_creator_id} not found.") - # Get OpenPype style asset documents for the shots - op_asset_docs = get_assets( - self.project_name, [s["id"] for s in shots]) - asset_docs_by_id = {doc["_id"]: doc for doc in op_asset_docs} + folder_ids = {s["id"] for s in shots} + folder_entities = get_folders(self.project_name, folder_ids) + task_entities = get_tasks( + self.project_name, folder_ids=folder_ids + ) + task_entities_by_folder_id = collections.defaultdict(dict) + for task_entity in task_entities: + folder_id = task_entity["folderId"] + task_name = task_entity["name"] + task_entities_by_folder_id[folder_id][task_name] = task_entity + + folder_entities_by_id = {fe["id"]: fe for fe in folder_entities} for shot in shots: # we are setting shot name to be displayed in the sequencer to # `shot name (shot label)` if the label is set, otherwise just @@ -141,12 +151,15 @@ class CreateMultishotLayout(plugin.MayaCreator): continue # get task for shot - asset_doc = asset_docs_by_id[shot["id"]] + folder_id = shot["id"] + folder_entity = folder_entities_by_id[folder_id] + task_entities = task_entities_by_folder_id[folder_id] - tasks = asset_doc.get("data").get("tasks").keys() - layout_task = None - if pre_create_data["taskName"] in tasks: - layout_task = pre_create_data["taskName"] + layout_task_name = None + layout_task_entity = None + if pre_create_data["taskName"] in task_entities: + layout_task_name = pre_create_data["taskName"] + layout_task_entity = task_entities[layout_task_name] shot_name = f"{shot['name']}%s" % ( f" ({shot['label']})" if shot["label"] else "") @@ -160,14 +173,14 @@ class CreateMultishotLayout(plugin.MayaCreator): "folderPath": shot["path"], "variant": layout_creator.get_default_variant() } - if layout_task: - instance_data["task"] = layout_task + if layout_task_name: + instance_data["task"] = layout_task_name layout_creator.create( product_name=layout_creator.get_product_name( self.project_name, - asset_doc, - self.create_context.get_current_task_name(), + folder_entity, + layout_task_entity, layout_creator.get_default_variant(), ), instance_data=instance_data, @@ -177,7 +190,7 @@ class CreateMultishotLayout(plugin.MayaCreator): ) def get_related_shots(self, folder_path: str): - """Get all shots related to the current asset. + """Get all shots related to the current folder. Get all folders of type Shot under specified folder. 
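For reference, the create_multishot_layout.py hunks above replace per-shot OpenPype asset-document lookups with two bulk `ayon_api` queries that are then grouped in memory. A minimal standalone sketch of that pattern follows; `group_shot_entities` is a hypothetical helper name, and `project_name` plus a `shots` list of folder entities are assumed inputs:

```python
import collections

from ayon_api import get_folders, get_tasks


def group_shot_entities(project_name, shots):
    """Illustrative sketch: bulk-fetch folder/task entities, then group."""
    folder_ids = {shot["id"] for shot in shots}
    # One query per entity type instead of one query per shot.
    folder_entities = get_folders(project_name, folder_ids)
    task_entities = get_tasks(project_name, folder_ids=folder_ids)

    # Map folder id -> {task name: task entity} for cheap lookups in loops.
    tasks_by_folder_id = collections.defaultdict(dict)
    for task_entity in task_entities:
        folder_id = task_entity["folderId"]
        tasks_by_folder_id[folder_id][task_entity["name"]] = task_entity

    folders_by_id = {fe["id"]: fe for fe in folder_entities}
    return folders_by_id, tasks_by_folder_id
```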
diff --git a/client/ayon_core/hosts/maya/plugins/create/create_render.py b/client/ayon_core/hosts/maya/plugins/create/create_render.py index 213d5b543e..e5a8d4dbd8 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_render.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_render.py @@ -40,8 +40,15 @@ class CreateRenderlayer(plugin.RenderlayerCreator): def create(self, product_name, instance_data, pre_create_data): # Only allow a single render instance to exist if self._get_singleton_node(): - raise CreatorError("A Render instance already exists - only " - "one can be configured.") + raise CreatorError( + "A Render instance already exists - only one can be " + "configured.\n\n" + "To render multiple render layers, create extra Render Setup " + "Layers via Maya's Render Setup UI.\n" + "Then refresh the publisher to detect the new layers for " + "rendering.\n\n" + "With a render instance present all Render Setup layers in " + "your workfile are renderable instances.") # Apply default project render settings on create if self.render_settings.get("apply_render_settings"): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_review.py b/client/ayon_core/hosts/maya/plugins/create/create_review.py index c4fa045427..8a2f2df745 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_review.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_review.py @@ -1,6 +1,7 @@ import json from maya import cmds +import ayon_api from ayon_core.hosts.maya.api import ( lib, @@ -12,7 +13,6 @@ from ayon_core.lib import ( EnumDef ) from ayon_core.pipeline import CreatedInstance -from ayon_core.client import get_asset_by_name TRANSPARENCIES = [ "preset", @@ -43,12 +43,17 @@ class CreateReview(plugin.MayaCreator): members = cmds.ls(selection=True) project_name = self.project_name - asset_name = instance_data["folderPath"] - asset_doc = get_asset_by_name(project_name, asset_name) + folder_path = instance_data["folderPath"] task_name = instance_data["task"] + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields={"taskType"} + ) preset = lib.get_capture_preset( task_name, - asset_doc["data"]["tasks"][task_name]["type"], + task_entity["taskType"], product_name, self.project_settings, self.log @@ -91,9 +96,9 @@ class CreateReview(plugin.MayaCreator): defs = lib.collect_animation_defs() - # Option for using Maya or asset frame range in settings. + # Option for using Maya or folder frame range in settings. 
if not self.useMayaTimeline: - # Update the defaults to be the asset frame range + # Update the defaults to be the folder frame range frame_range = lib.get_frame_range() defs_by_key = {attr_def.key: attr_def for attr_def in defs} for key, value in frame_range.items(): @@ -106,13 +111,13 @@ class CreateReview(plugin.MayaCreator): defs.extend([ NumberDef("review_width", label="Review width", - tooltip="A value of zero will use the asset resolution.", + tooltip="A value of zero will use the folder resolution.", decimals=0, minimum=0, default=0), NumberDef("review_height", label="Review height", - tooltip="A value of zero will use the asset resolution.", + tooltip="A value of zero will use the folder resolution.", decimals=0, minimum=0, default=0), diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 8815c4d23d..a32e94971e 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -20,24 +20,28 @@ class CreateUnrealSkeletalMesh(plugin.MayaCreator): # Defined in settings joint_hints = set() - def apply_settings(self, project_settings): - """Apply project settings to creator""" - settings = ( - project_settings["maya"]["create"]["CreateUnrealSkeletalMesh"] - ) - self.joint_hints = set(settings.get("joint_hints", [])) - def get_dynamic_data( - self, project_name, asset_doc, task_name, variant, host_name, instance + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ): """ The default product name templates for Unreal include {asset} and thus we should pass that along as dynamic data. """ dynamic_data = super(CreateUnrealSkeletalMesh, self).get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) - dynamic_data["asset"] = asset_doc["name"] + dynamic_data["asset"] = folder_entity["name"] return dynamic_data def create(self, product_name, instance_data, pre_create_data): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py index 58ad1e4133..76c33f00cc 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_staticmesh.py @@ -15,22 +15,28 @@ class CreateUnrealStaticMesh(plugin.MayaCreator): # Defined in settings collision_prefixes = [] - def apply_settings(self, project_settings): - """Apply project settings to creator""" - settings = project_settings["maya"]["create"]["CreateUnrealStaticMesh"] - self.collision_prefixes = settings["collision_prefixes"] - def get_dynamic_data( - self, project_name, asset_doc, task_name, variant, host_name, instance + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ): """ The default product name templates for Unreal include {asset} and thus we should pass that along as dynamic data. 
""" dynamic_data = super(CreateUnrealStaticMesh, self).get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) - dynamic_data["asset"] = asset_doc["name"] + dynamic_data["asset"] = folder_entity["name"] return dynamic_data def create(self, product_name, instance_data, pre_create_data): diff --git a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py index 1eac8a5ea9..dea64b40fb 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_unreal_yeticache.py @@ -5,7 +5,7 @@ from ayon_core.hosts.maya.api import ( from ayon_core.lib import NumberDef -class CreateYetiCache(plugin.MayaCreator): +class CreateUnrealYetiCache(plugin.MayaCreator): """Output for procedural plugin nodes of Yeti """ identifier = "io.openpype.creators.maya.unrealyeticache" diff --git a/client/ayon_core/hosts/maya/plugins/create/create_workfile.py b/client/ayon_core/hosts/maya/plugins/create/create_workfile.py index 5eb32e1c90..f636ed7b74 100644 --- a/client/ayon_core/hosts/maya/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/maya/plugins/create/create_workfile.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" +import ayon_api + from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_core.client import get_asset_by_name, get_asset_name_identifier from ayon_core.hosts.maya.api import plugin from maya import cmds @@ -25,34 +26,38 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): ), None) project_name = self.project_name - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name - if current_instance is None: - current_instance_asset = None - else: - current_instance_asset = current_instance["folderPath"] + current_folder_path = None + if current_instance is not None: + current_folder_path = current_instance["folderPath"] if current_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": variant } data.update( self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, current_instance) @@ -63,21 +68,25 @@ class CreateWorkfile(plugin.MayaCreatorBase, AutoCreator): ) self._add_instance_to_context(current_instance) elif ( - current_instance_asset != asset_name + current_folder_path != folder_path or current_instance["task"] != task_name ): # Update instance context if is not the same - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) - 
asset_name = get_asset_name_identifier(asset_doc) - current_instance["folderPath"] = asset_name + current_instance["folderPath"] = folder_entity["path"] current_instance["task"] = task_name current_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py index 054c84bea2..5410546a2e 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_geometry.py @@ -1,6 +1,6 @@ from maya import cmds -from ayon_core.pipeline import InventoryAction, get_representation_context +from ayon_core.pipeline import InventoryAction, get_repres_contexts from ayon_core.hosts.maya.api.lib import get_id @@ -28,14 +28,19 @@ class ConnectGeometry(InventoryAction): # Categorize containers by family. containers_by_product_type = {} + repre_ids = { + container["representation"] + for container in containers + } + repre_contexts_by_id = get_repres_contexts(repre_ids) for container in containers: - product_type = get_representation_context( - container["representation"] - )["subset"]["data"]["family"] - try: - containers_by_product_type[product_type].append(container) - except KeyError: - containers_by_product_type[product_type] = [container] + repre_id = container["representation"] + repre_context = repre_contexts_by_id[repre_id] + + product_type = repre_context["product"]["productType"] + + containers_by_product_type.setdefault(product_type, []) + containers_by_product_type[product_type].append(container) # Validate to only 1 source container. source_containers = containers_by_product_type.get("animation", []) diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py index fa6440fc37..166c419072 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_xgen.py @@ -2,7 +2,9 @@ from maya import cmds import xgenm from ayon_core.pipeline import ( - InventoryAction, get_representation_context, get_representation_path + InventoryAction, + get_repres_contexts, + get_representation_path, ) @@ -25,14 +27,19 @@ class ConnectXgen(InventoryAction): # Categorize containers by product type. containers_by_product_type = {} + repre_ids = { + container["representation"] + for container in containers + } + repre_contexts_by_id = get_repres_contexts(repre_ids) for container in containers: - product_type = get_representation_context( - container["representation"] - )["subset"]["data"]["family"] - try: - containers_by_product_type[product_type].append(container) - except KeyError: - containers_by_product_type[product_type] = [container] + repre_id = container["representation"] + repre_context = repre_contexts_by_id[repre_id] + + product_type = repre_context["product"]["productType"] + + containers_by_product_type.setdefault(product_type, []) + containers_by_product_type[product_type].append(container) # Validate to only 1 source container. source_containers = containers_by_product_type.get("animation", []) @@ -51,13 +58,12 @@ class ConnectXgen(InventoryAction): return source_container = source_containers[0] + source_repre_id = source_container["representation"] source_object = source_container["objectName"] # Validate source representation is an alembic. 
source_path = get_representation_path( - get_representation_context( - source_container["representation"] - )["representation"] + repre_contexts_by_id[source_repre_id]["representation"] ).replace("\\", "/") message = "Animation container \"{}\" is not an alembic:\n{}".format( source_container["namespace"], source_path diff --git a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py index 66807e9d5d..8f13cc6ae5 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/connect_yeti_rig.py @@ -5,7 +5,9 @@ from collections import defaultdict from maya import cmds from ayon_core.pipeline import ( - InventoryAction, get_representation_context, get_representation_path + InventoryAction, + get_repres_contexts, + get_representation_path, ) from ayon_core.hosts.maya.api.lib import get_container_members, get_id @@ -28,10 +30,18 @@ class ConnectYetiRig(InventoryAction): # Categorize containers by product type. containers_by_product_type = defaultdict(list) + repre_ids = { + container["representation"] + for container in containers + } + repre_contexts_by_id = get_repres_contexts(repre_ids) for container in containers: - product_type = get_representation_context( - container["representation"] - )["subset"]["data"]["family"] + repre_id = container["representation"] + repre_context = repre_contexts_by_id[repre_id] + + product_type = repre_context["product"]["productType"] + + containers_by_product_type.setdefault(product_type, []) containers_by_product_type[product_type].append(container) # Validate to only 1 source container. @@ -66,11 +76,10 @@ class ConnectYetiRig(InventoryAction): for container in yeti_rig_containers: target_ids.update(self.nodes_by_id(container)) + repre_id = container["representation"] maya_file = get_representation_path( - get_representation_context( - container["representation"] - )["representation"] + repre_contexts_by_id[repre_id]["representation"] ) _, ext = os.path.splitext(maya_file) settings_file = maya_file.replace(ext, ".rigsettings") diff --git a/client/ayon_core/hosts/maya/plugins/inventory/import_modelrender.py b/client/ayon_core/hosts/maya/plugins/inventory/import_modelrender.py index e2cac22836..4655017ae5 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/import_modelrender.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/import_modelrender.py @@ -1,13 +1,11 @@ import re import json -from ayon_core.client import ( - get_representation_by_id, - get_representations -) +import ayon_api + +from ayon_core.pipeline.load import get_representation_contexts_by_ids from ayon_core.pipeline import ( InventoryAction, - get_representation_context, get_current_project_name, ) from ayon_core.hosts.maya.api.lib import ( @@ -35,7 +33,69 @@ class ImportModelRender(InventoryAction): def process(self, containers): from maya import cmds # noqa: F401 + # --- Query entities that will be used --- project_name = get_current_project_name() + # Collect representation ids from all containers + repre_ids = { + container["representation"] + for container in containers + } + # Create mapping of representation id to version id + # - used in containers loop + version_id_by_repre_id = { + repre_entity["id"]: repre_entity["versionId"] + for repre_entity in ayon_api.get_representations( + project_name, + representation_ids=repre_ids, + fields={"id", "versionId"} + ) + } + + # Find all representations of the versions + version_ids = 
set(version_id_by_repre_id.values()) + repre_entities = ayon_api.get_representations( + project_name, + version_ids=version_ids, + fields={"id", "name", "versionId"} + ) + repre_entities_by_version_id = { + version_id: [] + for version_id in version_ids + } + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + repre_entities_by_version_id[version_id].append(repre_entity) + + look_repres_by_version_id = {} + look_repre_ids = set() + for version_id, repre_entities in ( + repre_entities_by_version_id.items() + ): + json_repre = None + look_repres = [] + scene_type_regex = re.compile(self.scene_type_regex) + for repre_entity in repre_entities: + repre_name = repre_entity["name"] + if repre_name == self.look_data_type: + json_repre = repre_entity + + elif scene_type_regex.fullmatch(repre_name): + look_repres.append(repre_entity) + + look_repre = look_repres[0] if look_repres else None + if look_repre: + look_repre_ids.add(look_repre["id"]) + if json_repre: + look_repre_ids.add(json_repre["id"]) + + look_repres_by_version_id[version_id] = (json_repre, look_repre) + + contexts_by_repre_id = get_representation_contexts_by_ids( + project_name, look_repre_ids + ) + + # --- Real process logic --- + # Loop over containers and assign the looks for container in containers: con_name = container["objectName"] nodes = [] @@ -45,22 +105,34 @@ class ImportModelRender(InventoryAction): else: nodes.append(n) - repr_doc = get_representation_by_id( - project_name, container["representation"], fields=["parent"] - ) - version_id = repr_doc["parent"] + repre_id = container["representation"] + version_id = version_id_by_repre_id.get(repre_id) + if version_id is None: + print("Representation '{}' was not found".format(repre_id)) + continue + + json_repre, look_repre = look_repres_by_version_id[version_id] print("Importing render sets for model %r" % con_name) - self.assign_model_render_by_version(nodes, version_id) + self._assign_model_render( + nodes, json_repre, look_repre, contexts_by_repre_id + ) - def assign_model_render_by_version(self, nodes, version_id): + def _assign_model_render( + self, nodes, json_repre, look_repre, contexts_by_repre_id + ): """Assign nodes a specific published model render data version by id. This assumes the nodes correspond with the asset. Args: - nodes(list): nodes to assign render data to - version_id (bson.ObjectId): database id of the version of model + nodes (list): nodes to assign render data to + json_repre (dict[str, Any]): Representation entity of the json + file. + look_repre (dict[str, Any]): First representation entity of the + look files. + contexts_by_repre_id (dict[str, Any]): Mapping of representation + id to its context. Returns: None @@ -68,33 +140,17 @@ class ImportModelRender(InventoryAction): from maya import cmds # noqa: F401 - project_name = get_current_project_name() - repre_docs = get_representations( - project_name, version_ids=[version_id], fields=["_id", "name"] - ) - # Get representations of shader file and relationships - json_repre = None - look_repres = [] - scene_type_regex = re.compile(self.scene_type_regex) - for repre_doc in repre_docs: - repre_name = repre_doc["name"] - if repre_name == self.look_data_type: - json_repre = repre_doc - continue - - if scene_type_regex.fullmatch(repre_name): - look_repres.append(repre_doc) - - look_repre = look_repres[0] if look_repres else None # QUESTION shouldn't be json representation validated too? 
if not look_repre: print("No model render sets for this model version..") return - context = get_representation_context(look_repre["_id"]) + # TODO use 'get_representation_path_with_anatomy' instead + # of 'filepath_from_context' + context = contexts_by_repre_id.get(look_repre["id"]) maya_file = self.filepath_from_context(context) - context = get_representation_context(json_repre["_id"]) + context = contexts_by_repre_id.get(json_repre["id"]) json_file = self.filepath_from_context(context) # Import the look file diff --git a/client/ayon_core/hosts/maya/plugins/inventory/rig_recreate_animation_instance.py b/client/ayon_core/hosts/maya/plugins/inventory/rig_recreate_animation_instance.py index 36d9864e99..cbff293cd7 100644 --- a/client/ayon_core/hosts/maya/plugins/inventory/rig_recreate_animation_instance.py +++ b/client/ayon_core/hosts/maya/plugins/inventory/rig_recreate_animation_instance.py @@ -1,7 +1,8 @@ from ayon_core.pipeline import ( InventoryAction, - get_representation_context + get_current_project_name, ) +from ayon_core.pipeline.load import get_representation_contexts_by_ids from ayon_core.hosts.maya.api.lib import ( create_rig_animation_instance, get_container_members, @@ -23,13 +24,21 @@ class RecreateRigAnimationInstance(InventoryAction): ) def process(self, containers): + project_name = get_current_project_name() + repre_ids = { + container["representation"] + for container in containers + } + contexts_by_repre_id = get_representation_contexts_by_ids( + project_name, repre_ids + ) for container in containers: # todo: delete an existing entry if it exist or skip creation namespace = container["namespace"] - representation_id = container["representation"] - context = get_representation_context(representation_id) + repre_id = container["representation"] + context = contexts_by_repre_id[repre_id] nodes = get_container_members(container) create_rig_animation_instance(nodes, context, namespace) diff --git a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py index e6dc1e520a..393f6b0115 100644 --- a/client/ayon_core/hosts/maya/plugins/load/_load_animation.py +++ b/client/ayon_core/hosts/maya/plugins/load/_load_animation.py @@ -46,10 +46,12 @@ def _process_reference(file_url, name, namespace, options): class AbcLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Loader to reference an Alembic file""" - families = ["animation", - "camera", - "pointcache"] - representations = ["abc"] + product_types = { + "animation", + "camera", + "pointcache", + } + representations = {"abc"} label = "Reference animation" order = -10 @@ -75,9 +77,11 @@ class AbcLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): class FbxLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Loader to reference an Fbx files""" - families = ["animation", - "camera"] - representations = ["fbx"] + product_types = { + "animation", + "camera", + } + representations = {"fbx"} label = "Reference animation" order = -10 diff --git a/client/ayon_core/hosts/maya/plugins/load/actions.py b/client/ayon_core/hosts/maya/plugins/load/actions.py index f979623544..8bef219812 100644 --- a/client/ayon_core/hosts/maya/plugins/load/actions.py +++ b/client/ayon_core/hosts/maya/plugins/load/actions.py @@ -13,11 +13,13 @@ import ayon_core.hosts.maya.api.plugin class SetFrameRangeLoader(load.LoaderPlugin): """Set frame range excluding pre- and post-handles""" - families = ["animation", - "camera", - "proxyAbc", - "pointcache"] - representations = ["abc"] + 
product_types = { + "animation", + "camera", + "proxyAbc", + "pointcache", + } + representations = {"abc"} label = "Set frame range" order = 11 @@ -28,11 +30,9 @@ class SetFrameRangeLoader(load.LoaderPlugin): import maya.cmds as cmds - version = context['version'] - version_data = version.get("data", {}) - - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + version_attributes = context["version"]["attrib"] + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") if start is None or end is None: print("Skipping setting frame range because start or " @@ -48,11 +48,13 @@ class SetFrameRangeLoader(load.LoaderPlugin): class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): """Set frame range including pre- and post-handles""" - families = ["animation", - "camera", - "proxyAbc", - "pointcache"] - representations = ["abc"] + product_types = { + "animation", + "camera", + "proxyAbc", + "pointcache", + } + representations = {"abc"} label = "Set frame range (with handles)" order = 12 @@ -63,11 +65,10 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): import maya.cmds as cmds - version = context['version'] - version_data = version.get("data", {}) + version_attributes = context["version"]["attrib"] - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") if start is None or end is None: print("Skipping setting frame range because start or " @@ -75,8 +76,8 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): return # Include handles - start -= version_data.get("handleStart", 0) - end += version_data.get("handleEnd", 0) + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) cmds.playbackOptions(minTime=start, maxTime=end, @@ -93,8 +94,8 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader): so you could also use it as a new base. 
""" - representations = ["ma", "mb", "obj"] - families = [ + representations = {"ma", "mb", "obj"} + product_types = { "model", "pointcache", "proxyAbc", @@ -107,8 +108,8 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader): "rig", "camerarig", "staticMesh", - "workfile" - ] + "workfile", + } label = "Import" order = 10 @@ -124,6 +125,11 @@ class ImportMayaLoader(ayon_core.hosts.maya.api.plugin.Loader): ) ] + @classmethod + def apply_settings(cls, project_settings): + super(ImportMayaLoader, cls).apply_settings(project_settings) + cls.enabled = cls.load_settings["import_loader"].get("enabled", True) + def load(self, context, name=None, namespace=None, data=None): import maya.cmds as cmds diff --git a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py index 312cc3bd6b..4b7d2f42ab 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_arnold_standin.py @@ -12,7 +12,6 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace, get_attribute_input, maintained_selection, - convert_to_maya_fps ) from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type @@ -29,8 +28,10 @@ def is_sequence(files): class ArnoldStandinLoader(load.LoaderPlugin): """Load as Arnold standin""" - families = ["ass", "animation", "model", "proxyAbc", "pointcache", "usd"] - representations = ["ass", "abc", "usda", "usdc", "usd"] + product_types = { + "ass", "animation", "model", "proxyAbc", "pointcache", "usd" + } + representations = {"ass", "abc", "usda", "usdc", "usd"} label = "Load as Arnold standin" order = -5 @@ -49,15 +50,14 @@ class ArnoldStandinLoader(load.LoaderPlugin): import mtoa.ui.arnoldmenu - version = context['version'] - version_data = version.get("data", {}) + version_attributes = context["version"]["attrib"] - self.log.info("version_data: {}\n".format(version_data)) + self.log.info("version_attributes: {}\n".format(version_attributes)) - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -95,7 +95,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): sequence = is_sequence(os.listdir(os.path.dirname(repre_path))) cmds.setAttr(standin_shape + ".useFrameExtension", sequence) - fps = float(version["data"].get("fps")) or 25 + fps = float(version_attributes.get("fps")) or 25 cmds.setAttr(standin_shape + ".abcFPS", fps) nodes = [root, standin, standin_shape] @@ -177,7 +177,7 @@ class ArnoldStandinLoader(load.LoaderPlugin): return proxy_path, string_replace_operator - def update(self, container, representation): + def update(self, container, context): # Update the standin members = cmds.sets(container['objectName'], query=True) for member in members: @@ -190,7 +190,8 @@ class ArnoldStandinLoader(load.LoaderPlugin): if cmds.nodeType(shapes[0]) == "aiStandIn": standin = shapes[0] - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) proxy_basename, proxy_path = self._get_proxy_path(path) # Whether there is proxy or so, we still update the string operator. 
@@ -216,12 +217,12 @@ class ArnoldStandinLoader(load.LoaderPlugin): cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py index e119dfe1c3..0fcbc6bd07 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_assembly.py @@ -12,8 +12,8 @@ from ayon_core.hosts.maya.api import setdress class AssemblyLoader(load.LoaderPlugin): - families = ["assembly"] - representations = ["json"] + product_types = {"assembly"} + representations = {"json"} label = "Load Set Dress" order = -9 @@ -21,11 +21,10 @@ class AssemblyLoader(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -49,9 +48,9 @@ class AssemblyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): - return setdress.update_package(container, representation) + return setdress.update_package(container, context) def remove(self, container): """Remove all sub containers""" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_audio.py b/client/ayon_core/hosts/maya/plugins/load/load_audio.py index deeeac66f2..228189f1a1 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_audio.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_audio.py @@ -11,9 +11,9 @@ from ayon_core.hosts.maya.api.lib import unique_namespace, get_container_members class AudioLoader(load.LoaderPlugin): """Specific loader of audio.""" - families = ["audio"] + product_types = {"audio"} label = "Load audio" - representations = ["wav"] + representations = {"wav"} icon = "volume-up" color = "orange" @@ -30,10 +30,10 @@ class AudioLoader(load.LoaderPlugin): displaySound=True ) - asset = context["asset"]["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -45,7 +45,8 @@ class AudioLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + repre_entity = context["representation"] members = get_container_members(container) audio_nodes = cmds.ls(members, type="audio") @@ -60,7 +61,7 @@ class AudioLoader(load.LoaderPlugin): ) activate_sound = current_sound == audio_node - path = get_representation_path(representation) + path = get_representation_path(repre_entity) cmds.sound( audio_node, @@ -93,12 +94,12 @@ class AudioLoader(load.LoaderPlugin): cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, 
context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py index 38f9d1b7cb..9832d2d657 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_gpucache.py @@ -1,5 +1,3 @@ -import os - import maya.cmds as cmds from ayon_core.hosts.maya.api.pipeline import containerise @@ -15,8 +13,8 @@ from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class GpuCacheLoader(load.LoaderPlugin): """Load Alembic as gpuCache""" - families = ["model", "animation", "proxyAbc", "pointcache"] - representations = ["abc", "gpu_cache"] + product_types = {"model", "animation", "proxyAbc", "pointcache"} + representations = {"abc", "gpu_cache"} label = "Load Gpu Cache" order = -5 @@ -24,11 +22,10 @@ class GpuCacheLoader(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -74,8 +71,9 @@ class GpuCacheLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): - path = get_representation_path(representation) + def update(self, container, context): + repre_entity = context["representation"] + path = get_representation_path(repre_entity) # Update the cache members = cmds.sets(container['objectName'], query=True) @@ -87,11 +85,11 @@ class GpuCacheLoader(load.LoaderPlugin): cmds.setAttr(cache + ".cacheFileName", path, type="string") cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image.py b/client/ayon_core/hosts/maya/plugins/load/load_image.py index aedeb63e3d..5b0858ce70 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image.py @@ -1,10 +1,8 @@ -import os import copy from ayon_core.lib import EnumDef from ayon_core.pipeline import ( load, - get_representation_context, get_current_host_name, ) from ayon_core.pipeline.load.utils import get_representation_path_from_context @@ -93,9 +91,9 @@ def create_stencil(): class FileNodeLoader(load.LoaderPlugin): """File node loader.""" - families = ["image", "plate", "render"] + product_types = {"image", "plate", "render"} label = "Load file node" - representations = ["exr", "tif", "png", "jpg"] + representations = {"exr", "tif", "png", "jpg"} icon = "image" color = "orange" order = 2 @@ -114,11 +112,10 @@ class FileNodeLoader(load.LoaderPlugin): ] def load(self, context, name, namespace, data): - - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -146,23 +143,23 @@ class 
FileNodeLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + repre_entity = context["representation"] members = cmds.sets(container['objectName'], query=True) file_node = cmds.ls(members, type="file")[0] - context = get_representation_context(representation) self._apply_representation_context(context, file_node) # Update representation cmds.setAttr( container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string" ) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) @@ -223,15 +220,18 @@ class FileNodeLoader(load.LoaderPlugin): def _is_sequence(self, context): """Check whether frameStart and frameEnd are not the same.""" - version = context.get("version", {}) - representation = context.get("representation", {}) + version = context["version"] + representation = context["representation"] - for doc in [representation, version]: + # TODO this is invalid logic, it should be based only on + # representation entity + for entity in [representation, version]: # Frame range can be set on version or representation. # When set on representation it overrides version data. - data = doc.get("data", {}) - start = data.get("frameStartHandle", data.get("frameStart", None)) - end = data.get("frameEndHandle", data.get("frameEnd", None)) + attributes = entity["attrib"] + data = entity["data"] + start = data.get("frameStartHandle", attributes.get("frameStart")) + end = data.get("frameEndHandle", attributes.get("frameEnd")) if start is None or end is None: continue @@ -301,7 +301,7 @@ class FileNodeLoader(load.LoaderPlugin): context = copy.deepcopy(context) representation = context["representation"] - template = representation.get("data", {}).get("template") + template = representation.get("attrib", {}).get("template") if not template: # No template to find token locations for return get_representation_path_from_context(context) @@ -327,7 +327,7 @@ class FileNodeLoader(load.LoaderPlugin): has_tokens = True # Replace with our custom template that has the tokens set - representation["data"]["template"] = template + representation["attrib"]["template"] = template path = get_representation_path_from_context(context) if has_tokens: diff --git a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py index a685a4a41c..15c7654c52 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_image_plane.py @@ -1,14 +1,8 @@ from qtpy import QtWidgets, QtCore -from ayon_core.client import ( - get_asset_by_id, - get_subset_by_id, - get_version_by_id, -) from ayon_core.pipeline import ( load, get_representation_path, - get_current_project_name, ) from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.lib import ( @@ -93,19 +87,19 @@ class CameraWindow(QtWidgets.QDialog): class ImagePlaneLoader(load.LoaderPlugin): """Specific loader of plate for image planes on selected camera.""" - families = ["image", "plate", "render"] + product_types = {"image", "plate", "render"} label = "Load imagePlane" - representations = ["mov", "exr", "preview", "png", "jpg"] + representations = {"mov", "exr", "preview", "png", "jpg"} icon = "image" 
color = "orange" def load(self, context, name, namespace, data, options=None): image_plane_depth = 1000 - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -148,9 +142,21 @@ class ImagePlaneLoader(load.LoaderPlugin): with namespaced(namespace): # Create inside the namespace image_plane_transform, image_plane_shape = cmds.imagePlane( - fileName=context["representation"]["data"]["path"], + fileName=self.filepath_from_context(context), camera=camera ) + + # Set colorspace + colorspace = self.get_colorspace(context["representation"]) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + + # Set offset frame range start_frame = cmds.playbackOptions(query=True, min=True) end_frame = cmds.playbackOptions(query=True, max=True) @@ -165,7 +171,7 @@ class ImagePlaneLoader(load.LoaderPlugin): plug = "{}.{}".format(image_plane_shape, attr) cmds.setAttr(plug, value) - movie_representations = ["mov", "preview"] + movie_representations = {"mov", "preview"} if context["representation"]["name"] in movie_representations: cmds.setAttr(image_plane_shape + ".type", 2) @@ -205,34 +211,35 @@ class ImagePlaneLoader(load.LoaderPlugin): loader=self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): + folder_entity = context["folder"] + repre_entity = context["representation"] members = get_container_members(container) image_planes = cmds.ls(members, type="imagePlane") assert image_planes, "Image plane not found." image_plane_shape = image_planes[0] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) cmds.setAttr("{}.imageName".format(image_plane_shape), path, type="string") cmds.setAttr("{}.representation".format(container["objectName"]), - str(representation["_id"]), + repre_entity["id"], type="string") + colorspace = self.get_colorspace(repre_entity) + if colorspace: + cmds.setAttr( + "{}.ignoreColorSpaceFileRules".format(image_plane_shape), + True + ) + cmds.setAttr("{}.colorSpace".format(image_plane_shape), + colorspace, type="string") + # Set frame range. 
- project_name = get_current_project_name() - version = get_version_by_id( - project_name, representation["parent"], fields=["parent"] - ) - subset_doc = get_subset_by_id( - project_name, version["parent"], fields=["parent"] - ) - asset_doc = get_asset_by_id( - project_name, subset_doc["parent"], fields=["parent"] - ) - start_frame = asset_doc["data"]["frameStart"] - end_frame = asset_doc["data"]["frameEnd"] + start_frame = folder_entity["attrib"]["frameStart"] + end_frame = folder_entity["attrib"]["frameEnd"] for attr, value in { "frameOffset": 0, @@ -243,8 +250,8 @@ class ImagePlaneLoader(load.LoaderPlugin): plug = "{}.{}".format(image_plane_shape, attr) cmds.setAttr(plug, value) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): members = cmds.sets(container['objectName'], query=True) @@ -257,3 +264,12 @@ class ImagePlaneLoader(load.LoaderPlugin): deleteNamespaceContent=True) except RuntimeError: pass + + def get_colorspace(self, representation): + + data = representation.get("data", {}).get("colorspaceData", {}) + if not data: + return + + colorspace = data.get("colorspace") + return colorspace diff --git a/client/ayon_core/hosts/maya/plugins/load/load_look.py b/client/ayon_core/hosts/maya/plugins/load/load_look.py index ba5891469d..af0e000dd2 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_look.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_look.py @@ -4,12 +4,9 @@ import json from collections import defaultdict from qtpy import QtWidgets +from ayon_api import get_representation_by_name -from ayon_core.client import get_representation_by_name -from ayon_core.pipeline import ( - get_current_project_name, - get_representation_path, -) +from ayon_core.pipeline import get_representation_path import ayon_core.hosts.maya.api.plugin from ayon_core.hosts.maya.api import lib from ayon_core.hosts.maya.api.lib import get_reference_node @@ -20,8 +17,8 @@ from ayon_core.tools.utils import ScrollMessageBox class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Specific loader for lookdev""" - families = ["look"] - representations = ["ma"] + product_types = {"look"} + representations = {"ma"} label = "Reference look" order = -10 @@ -43,10 +40,10 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): self[:] = nodes - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. 
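For reference, under the context-based API used in the hunks around this point, a sibling representation of the same version (such as the `.json` relationships file published next to a look) is resolved by name. A minimal sketch under that assumption; `find_look_relationships` is a hypothetical helper:

```python
from ayon_api import get_representation_by_name


def find_look_relationships(context):
    """Fetch the 'json' representation published with a look version."""
    project_name = context["project"]["name"]
    version_id = context["version"]["id"]
    # Returns the representation entity, or None when the version
    # has no representation named "json".
    return get_representation_by_name(project_name, "json", version_id)
```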
@@ -56,7 +53,7 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): Args: container: object that has look to be updated - representation: (dict): relationship data to get proper + context: (dict): relationship data to get proper representation from DB and persisted data in .json Returns: @@ -72,15 +69,16 @@ class LookLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): orig_nodes = set(self._get_nodes_with_shader(shader_nodes)) # Trigger the regular reference update on the ReferenceLoader - super(LookLoader, self).update(container, representation) + super(LookLoader, self).update(container, context) # get new applied shaders and nodes from new version shader_nodes = cmds.ls(members, type='shadingEngine') nodes = set(self._get_nodes_with_shader(shader_nodes)) - project_name = get_current_project_name() + version_id = context["version"]["id"] + project_name = context["project"]["name"] json_representation = get_representation_by_name( - project_name, "json", representation["parent"] + project_name, "json", version_id ) # Load relationships diff --git a/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py b/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py index 885d2dbae1..b19b14b1aa 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_matchmove.py @@ -8,8 +8,8 @@ class MatchmoveLoader(load.LoaderPlugin): Supported script types are .py and .mel """ - families = ["matchmove"] - representations = ["py", "mel"] + product_types = {"matchmove"} + representations = {"py", "mel"} defaults = ["Camera", "Object", "Mocap"] label = "Run matchmove script" diff --git a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py index c2bea1501c..628a25e574 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_maya_usd.py @@ -16,8 +16,8 @@ from ayon_core.hosts.maya.api.pipeline import containerise class MayaUsdLoader(load.LoaderPlugin): """Read USD data in a Maya USD Proxy""" - families = ["model", "usd", "pointcache", "animation"] - representations = ["usd", "usda", "usdc", "usdz", "abc"] + product_types = {"model", "usd", "pointcache", "animation"} + representations = {"usd", "usda", "usdc", "usdz", "abc"} label = "Load USD to Maya Proxy" order = -1 @@ -25,10 +25,10 @@ class MayaUsdLoader(load.LoaderPlugin): color = "orange" def load(self, context, name=None, namespace=None, options=None): - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -69,7 +69,7 @@ class MayaUsdLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -78,16 +78,17 @@ class MayaUsdLoader(load.LoaderPlugin): members = cmds.sets(node, query=True) or [] shapes = cmds.ls(members, type="mayaUsdProxyShape") - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) for shape in shapes: cmds.setAttr("{}.filePath".format(shape), path, type="string") cmds.setAttr("{}.representation".format(node), - 
str(representation["_id"]), + repre_entity["id"], type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py index a9ba2b8773..f32c76481d 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd.py @@ -3,6 +3,8 @@ import maya.cmds as cmds from maya import mel import os +from ayon_api import get_representation_by_id + from ayon_core.pipeline import ( load, get_representation_path @@ -13,15 +15,20 @@ from ayon_core.hosts.maya.api.lib import ( unique_namespace ) from ayon_core.hosts.maya.api.pipeline import containerise -from ayon_core.client import get_representation_by_id class MultiverseUsdLoader(load.LoaderPlugin): """Read USD data in a Multiverse Compound""" - families = ["model", "usd", "mvUsdComposition", "mvUsdOverride", - "pointcache", "animation"] - representations = ["usd", "usda", "usdc", "usdz", "abc"] + product_types = { + "model", + "usd", + "mvUsdComposition", + "mvUsdOverride", + "pointcache", + "animation", + } + representations = {"usd", "usda", "usdc", "usdz", "abc"} label = "Load USD to Multiverse" order = -10 @@ -29,10 +36,10 @@ class MultiverseUsdLoader(load.LoaderPlugin): color = "orange" def load(self, context, name=None, namespace=None, options=None): - asset = context['asset']['name'] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -60,7 +67,7 @@ class MultiverseUsdLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -70,11 +77,13 @@ class MultiverseUsdLoader(load.LoaderPlugin): shapes = cmds.ls(members, type="mvUsdCompoundShape") assert shapes, "Cannot find mvUsdCompoundShape in container" - project_name = representation["context"]["project"]["name"] + project_name = context["project"]["name"] + repre_entity = context["representation"] + path = get_representation_path(repre_entity) prev_representation_id = cmds.getAttr("{}.representation".format(node)) prev_representation = get_representation_by_id(project_name, prev_representation_id) - prev_path = os.path.normpath(prev_representation["data"]["path"]) + prev_path = os.path.normpath(prev_representation["attrib"]["path"]) # Make sure we can load the plugin cmds.loadPlugin("MultiverseForMaya", quiet=True) @@ -89,18 +98,17 @@ class MultiverseUsdLoader(load.LoaderPlugin): "Couldn't find matching path (or too many)" prev_path_idx = asset_paths.index(prev_path) - path = get_representation_path(representation) asset_paths[prev_path_idx] = path multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") mel.eval('refreshEditorTemplates;') - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, 
container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py index d448dc74a8..b23fa48f07 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_multiverse_usd_over.py @@ -4,6 +4,7 @@ from maya import mel import os import qargparse +from ayon_api import get_representation_by_id from ayon_core.pipeline import ( load, @@ -13,14 +14,13 @@ from ayon_core.hosts.maya.api.lib import ( maintained_selection ) from ayon_core.hosts.maya.api.pipeline import containerise -from ayon_core.client import get_representation_by_id class MultiverseUsdOverLoader(load.LoaderPlugin): """Reference file""" - families = ["mvUsdOverride"] - representations = ["usda", "usd", "udsz"] + product_types = {"mvUsdOverride"} + representations = {"usda", "usd", "udsz"} label = "Load Usd Override into Compound" order = -10 @@ -71,7 +71,7 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): return container - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" @@ -88,13 +88,14 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): mvShape = container['mvUsdCompoundShape'] assert mvShape, "Missing mv source" - project_name = representation["context"]["project"]["name"] + project_name = context["project"]["name"] + repre_entity = context["representation"] prev_representation_id = cmds.getAttr("{}.representation".format(node)) prev_representation = get_representation_by_id(project_name, prev_representation_id) - prev_path = os.path.normpath(prev_representation["data"]["path"]) + prev_path = os.path.normpath(prev_representation["attrib"]["path"]) - path = get_representation_path(representation) + path = get_representation_path(repre_entity) for shape in shapes: asset_paths = multiverse.GetUsdCompoundAssetPaths(shape) @@ -107,12 +108,12 @@ class MultiverseUsdOverLoader(load.LoaderPlugin): multiverse.SetUsdCompoundAssetPaths(shape, asset_paths) cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") mel.eval('refreshEditorTemplates;') - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): # type: (dict) -> None diff --git a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py index eb7e0957ac..7760d4081c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_redshift_proxy.py @@ -22,8 +22,8 @@ from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class RedshiftProxyLoader(load.LoaderPlugin): """Load Redshift proxy""" - families = ["redshiftproxy"] - representations = ["rs"] + product_types = {"redshiftproxy"} + representations = {"rs"} label = "Import Redshift Proxy" order = -10 @@ -32,15 +32,12 @@ class RedshiftProxyLoader(load.LoaderPlugin): def load(self, context, name=None, namespace=None, options=None): """Plugin entry point.""" - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "redshiftproxy" + product_type = context["product"]["productType"] - asset_name = context['asset']["name"] + folder_name 
= context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -75,7 +72,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): node = container['objectName'] assert cmds.objExists(node), "Missing container" @@ -83,8 +80,8 @@ class RedshiftProxyLoader(load.LoaderPlugin): members = cmds.sets(node, query=True) or [] rs_meshes = cmds.ls(members, type="RedshiftProxyMesh") assert rs_meshes, "Cannot find RedshiftProxyMesh in container" - - filename = get_representation_path(representation) + repre_entity = context["representation"] + filename = get_representation_path(repre_entity) for rs_mesh in rs_meshes: cmds.setAttr("{}.fileName".format(rs_mesh), @@ -93,7 +90,7 @@ class RedshiftProxyLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") def remove(self, container): @@ -113,8 +110,8 @@ class RedshiftProxyLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def create_rs_proxy(self, name, path): """Creates Redshift Proxies showing a proxy object. diff --git a/client/ayon_core/hosts/maya/plugins/load/load_reference.py b/client/ayon_core/hosts/maya/plugins/load/load_reference.py index eee3d92641..847591bd11 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_reference.py @@ -89,23 +89,25 @@ def preserve_modelpanel_cameras(container, log=None): class ReferenceLoader(plugin.ReferenceLoader): """Reference file""" - families = ["model", - "pointcache", - "proxyAbc", - "animation", - "mayaAscii", - "mayaScene", - "setdress", - "layout", - "camera", - "rig", - "camerarig", - "staticMesh", - "skeletalMesh", - "mvLook", - "matchmove"] + product_types = { + "model", + "pointcache", + "proxyAbc", + "animation", + "mayaAscii", + "mayaScene", + "setdress", + "layout", + "camera", + "rig", + "camerarig", + "staticMesh", + "skeletalMesh", + "mvLook", + "matchmove", + } - representations = ["ma", "abc", "fbx", "mb"] + representations = {"ma", "abc", "fbx", "mb"} label = "Reference" order = -10 @@ -115,11 +117,7 @@ class ReferenceLoader(plugin.ReferenceLoader): def process_reference(self, context, name, namespace, options): import maya.cmds as cmds - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "model" - + product_type = context["product"]["productType"] project_name = context["project"]["name"] # True by default to keep legacy behaviours attach_to_root = options.get("attach_to_root", True) @@ -231,12 +229,12 @@ class ReferenceLoader(plugin.ReferenceLoader): *options["translate"]) return new_nodes - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): with preserve_modelpanel_cameras(container, log=self.log): - super(ReferenceLoader, self).update(container, representation) + 
super(ReferenceLoader, self).update(container, context) # We also want to lock camera transforms on any new cameras in the # reference or for a camera which might have changed names. @@ -270,8 +268,8 @@ class MayaUSDReferenceLoader(ReferenceLoader): """Reference USD file to native Maya nodes using MayaUSDImport reference""" label = "Reference Maya USD" - families = ["usd"] - representations = ["usd"] + product_types = {"usd"} + representations = {"usd"} extensions = {"usd", "usda", "usdc"} options = ReferenceLoader.options + [ diff --git a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py index e77e270663..d5685b2c4c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_rendersetup.py @@ -9,7 +9,9 @@ instance. import json import sys import six +import contextlib +from ayon_core.lib import BoolDef, EnumDef from ayon_core.pipeline import ( load, get_representation_path @@ -21,59 +23,115 @@ from maya import cmds import maya.app.renderSetup.model.renderSetup as renderSetup +@contextlib.contextmanager +def mark_all_imported(enabled): + """Mark all imported nodes accepted by removing the `imported` attribute""" + if not enabled: + yield + return + + node_types = cmds.pluginInfo("renderSetup", query=True, dependNode=True) + + # Get node before load, then we can disable `imported` + # attribute on all new render setup layers after import + before = cmds.ls(type=node_types, long=True) + try: + yield + finally: + after = cmds.ls(type=node_types, long=True) + for node in (node for node in after if node not in before): + if cmds.attributeQuery("imported", + node=node, + exists=True): + plug = "{}.imported".format(node) + if cmds.getAttr(plug): + cmds.deleteAttr(plug) + + class RenderSetupLoader(load.LoaderPlugin): """Load json preset for RenderSetup overwriting current one.""" - families = ["rendersetup"] - representations = ["json"] + product_types = {"rendersetup"} + representations = {"json"} defaults = ['Main'] label = "Load RenderSetup template" icon = "tablet" color = "orange" + options = [ + BoolDef("accept_import", + label="Accept import on load", + tooltip=( + "By default importing or pasting Render Setup collections " + "will display them italic in the Render Setup list.\nWith " + "this enabled the load will directly mark the import " + "'accepted' and remove the italic view." + ), + default=True), + BoolDef("load_managed", + label="Load Managed", + tooltip=( + "Containerize the rendersetup on load so it can be " + "'updated' later." 
+ ), + default=True), + EnumDef("import_mode", + label="Import mode", + items={ + renderSetup.DECODE_AND_OVERWRITE: ( + "Flush existing render setup and " + "add without any namespace" + ), + renderSetup.DECODE_AND_MERGE: ( + "Merge with the existing render setup objects and " + "rename the unexpected objects" + ), + renderSetup.DECODE_AND_RENAME: ( + "Renaming all decoded render setup objects to not " + "conflict with the existing render setup" + ), + }, + default=renderSetup.DECODE_AND_OVERWRITE) + ] + def load(self, context, name, namespace, data): """Load RenderSetup settings.""" - # from ayon_core.hosts.maya.api.lib import namespaced - - asset = context['asset']['name'] - namespace = namespace or lib.unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", - suffix="_", - ) path = self.filepath_from_context(context) + + accept_import = data.get("accept_import", True) + import_mode = data.get("import_mode", renderSetup.DECODE_AND_OVERWRITE) + self.log.info(">>> loading json [ {} ]".format(path)) - with open(path, "r") as file: - renderSetup.instance().decode( - json.load(file), renderSetup.DECODE_AND_OVERWRITE, None) + with mark_all_imported(accept_import): + with open(path, "r") as file: + renderSetup.instance().decode( + json.load(file), import_mode, None) - nodes = [] - null = cmds.sets(name="null_SET", empty=True) - nodes.append(null) + if data.get("load_managed", True): + self.log.info(">>> containerising [ {} ]".format(name)) + folder_name = context["folder"]["name"] + namespace = namespace or lib.unique_namespace( + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", + suffix="_", + ) - self[:] = nodes - if not nodes: - return - - self.log.info(">>> containerising [ {} ]".format(name)) - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__) + return containerise( + name=name, + namespace=namespace, + nodes=[], + context=context, + loader=self.__class__.__name__) def remove(self, container): """Remove RenderSetup settings instance.""" - from maya import cmds - container_name = container["objectName"] self.log.info("Removing '%s' from Maya.." % container["name"]) - container_content = cmds.sets(container_name, query=True) + container_content = cmds.sets(container_name, query=True) or [] nodes = cmds.ls(container_content, long=True) nodes.append(container_name) @@ -84,14 +142,15 @@ class RenderSetupLoader(load.LoaderPlugin): # Already implicitly deleted by Maya upon removing reference pass - def update(self, container, representation): + def update(self, container, context): """Update RenderSetup setting by overwriting existing settings.""" lib.show_message( "Render setup update", "Render setup setting will be overwritten by new version. All " "setting specified by user not included in loaded version " "will be lost.") - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) with open(path, "r") as file: try: renderSetup.instance().decode( @@ -103,10 +162,10 @@ class RenderSetupLoader(load.LoaderPlugin): # Update metadata node = container["objectName"] cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") self.log.info("... 
updated") - def switch(self, container, representation): + def switch(self, container, context): """Switch representations.""" - self.update(container, representation) + self.update(container, context) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py index 80a7fa6006..5b0c78fd6f 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_arnold.py @@ -12,8 +12,8 @@ from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class LoadVDBtoArnold(load.LoaderPlugin): """Load OpenVDB for Arnold in aiVolume""" - families = ["vdbcache"] - representations = ["vdb"] + product_types = {"vdbcache"} + representations = {"vdb"} label = "Load VDB to Arnold" icon = "cloud" @@ -25,10 +25,7 @@ class LoadVDBtoArnold(load.LoaderPlugin): from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.lib import unique_namespace - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "vdbcache" + product_type = context["product"]["productType"] # Check if the plugin for arnold is available on the pc try: @@ -37,11 +34,10 @@ class LoadVDBtoArnold(load.LoaderPlugin): self.log.error("Encountered exception:\n%s" % exc) return - asset = context['asset'] - asset_name = asset["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -65,7 +61,7 @@ class LoadVDBtoArnold(load.LoaderPlugin): path = self.filepath_from_context(context) self._set_path(grid_node, path=path, - representation=context["representation"]) + repre_entity=context["representation"]) # Lock the shape node so the user can't delete the transform/shape # as if it was referenced @@ -81,11 +77,13 @@ class LoadVDBtoArnold(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from maya import cmds - path = get_representation_path(representation) + repre_entity = context["representation"] + + path = get_representation_path(repre_entity) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -93,21 +91,21 @@ class LoadVDBtoArnold(load.LoaderPlugin): assert len(grid_nodes) == 1, "This is a bug" # Update the VRayVolumeGrid - self._set_path(grid_nodes[0], path=path, representation=representation) + self._set_path(grid_nodes[0], path=path, repre_entity=repre_entity) # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): from maya import cmds - # Get all members of the avalon container, ensure they are unlocked + # Get all members of the AYON container, ensure they are unlocked # and delete everything members = cmds.sets(container['objectName'], query=True) cmds.lockNode(members, lock=False) @@ -123,14 +121,14 @@ class LoadVDBtoArnold(load.LoaderPlugin): @staticmethod def _set_path(grid_node, path, - representation): + repre_entity): """Apply the settings for the VDB path to the aiVolume node""" from 
maya import cmds if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) - is_sequence = bool(representation["context"].get("frame")) + is_sequence = "frame" in repre_entity["context"] cmds.setAttr(grid_node + ".useFrameExtension", is_sequence) # Set file path diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py index 65bef51ec6..e345a7bf6f 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_redshift.py @@ -18,8 +18,8 @@ class LoadVDBtoRedShift(load.LoaderPlugin): """ - families = ["vdbcache"] - representations = ["vdb"] + product_types = {"vdbcache"} + representations = {"vdb"} label = "Load VDB to RedShift" icon = "cloud" @@ -31,10 +31,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin): from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.lib import unique_namespace - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "vdbcache" + product_type = context["product"]["productType"] # Check if the plugin for redshift is available on the pc try: @@ -55,12 +52,10 @@ class LoadVDBtoRedShift(load.LoaderPlugin): "set the render engine to '%s'" % compatible) - asset = context['asset'] - - asset_name = asset["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -95,10 +90,11 @@ class LoadVDBtoRedShift(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): from maya import cmds - path = get_representation_path(representation) + repre_entity = context["representation"] + path = get_representation_path(repre_entity) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -106,17 +102,17 @@ class LoadVDBtoRedShift(load.LoaderPlugin): assert len(grid_nodes) == 1, "This is a bug" # Update the VRayVolumeGrid - self._set_path(grid_nodes[0], path=path, representation=representation) + self._set_path(grid_nodes[0], path=path, representation=repre_entity) # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string") def remove(self, container): from maya import cmds - # Get all members of the avalon container, ensure they are unlocked + # Get all members of the AYON container, ensure they are unlocked # and delete everything members = cmds.sets(container['objectName'], query=True) cmds.lockNode(members, lock=False) @@ -129,8 +125,8 @@ class LoadVDBtoRedShift(load.LoaderPlugin): except RuntimeError: pass - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) @staticmethod def _set_path(grid_node, @@ -142,7 +138,7 @@ class LoadVDBtoRedShift(load.LoaderPlugin): if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) - is_sequence = bool(representation["context"].get("frame")) + is_sequence = "frame" in representation["context"] cmds.setAttr(grid_node + ".useFrameExtension", is_sequence) # Set file path diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py 
b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py index 4b18e60c9d..d6d1c948b0 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vdb_to_vray.py @@ -77,8 +77,8 @@ def _fix_duplicate_vvg_callbacks(): class LoadVDBtoVRay(load.LoaderPlugin): """Load OpenVDB in a V-Ray Volume Grid""" - families = ["vdbcache"] - representations = ["vdb"] + product_types = {"vdbcache"} + representations = {"vdb"} label = "Load VDB to VRay" icon = "cloud" @@ -94,10 +94,7 @@ class LoadVDBtoVRay(load.LoaderPlugin): "Path does not exist: %s" % path ) - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "vdbcache" + product_type = context["product"]["productType"] # Ensure V-ray is loaded with the vrayvolumegrid if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): @@ -116,11 +113,10 @@ class LoadVDBtoVRay(load.LoaderPlugin): "See Preferences > Display > Viewport 2.0 to " "set the render engine to '%s'" % compatible) - asset = context['asset'] - asset_name = asset["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -254,9 +250,10 @@ class LoadVDBtoVRay(load.LoaderPlugin): restored_mapping, type="string") - def update(self, container, representation): + def update(self, container, context): + repre_entity = context["representation"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) # Find VRayVolumeGrid members = cmds.sets(container['objectName'], query=True) @@ -269,15 +266,15 @@ class LoadVDBtoVRay(load.LoaderPlugin): # Update container representation cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), + repre_entity["id"], type="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): - # Get all members of the avalon container, ensure they are unlocked + # Get all members of the AYON container, ensure they are unlocked # and delete everything members = cmds.sets(container['objectName'], query=True) cmds.lockNode(members, lock=False) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py index d4aad10762..14d645021c 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayproxy.py @@ -9,11 +9,10 @@ import os import maya.cmds as cmds -from ayon_core.client import get_representation_by_name +import ayon_api from ayon_core.settings import get_project_settings from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.maya.api.lib import ( @@ -28,8 +27,8 @@ from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class VRayProxyLoader(load.LoaderPlugin): """Load VRay Proxy with Alembic or VrayMesh.""" - families = ["vrayproxy", "model", "pointcache", "animation"] - representations = ["vrmesh", "abc"] + product_types = {"vrayproxy", "model", "pointcache", "animation"} + representations = {"vrmesh", "abc"} label = "Import VRay Proxy" order = -10 @@ -48,20 +47,19 @@ class VRayProxyLoader(load.LoaderPlugin): """ - try: - product_type = 
context["representation"]["context"]["family"] - except ValueError: - product_type = "vrayproxy" + product_type = context["product"]["productType"] # get all representations for this version - filename = self._get_abc(context["version"]["_id"]) + filename = self._get_abc( + context["project"]["name"], context["version"]["id"] + ) if not filename: filename = self.filepath_from_context(context) - asset_name = context['asset']["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -96,7 +94,7 @@ class VRayProxyLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): # type: (dict, dict) -> None """Update container with specified representation.""" node = container['objectName'] @@ -107,10 +105,12 @@ class VRayProxyLoader(load.LoaderPlugin): assert vraymeshes, "Cannot find VRayMesh in container" # get all representations for this version - filename = ( - self._get_abc(representation["parent"]) - or get_representation_path(representation) + repre_entity = context["representation"] + filename = self._get_abc( + context["project"]["name"], context["version"]["id"] ) + if not filename: + filename = get_representation_path(repre_entity) for vray_mesh in vraymeshes: cmds.setAttr("{}.fileName".format(vray_mesh), @@ -119,7 +119,7 @@ class VRayProxyLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") def remove(self, container): @@ -140,10 +140,10 @@ class VRayProxyLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): + def switch(self, container, context): # type: (dict, dict) -> None """Switch loaded representation.""" - self.update(container, representation) + self.update(container, context) def create_vray_proxy(self, name, filename): # type: (str, str) -> (list, str) @@ -169,7 +169,7 @@ class VRayProxyLoader(load.LoaderPlugin): return [parent, proxy], parent - def _get_abc(self, version_id): + def _get_abc(self, project_name, version_id): # type: (str) -> str """Get abc representation file path if present. @@ -177,6 +177,7 @@ class VRayProxyLoader(load.LoaderPlugin): vray proxy, get is file path. Args: + project_name (str): Project name. version_id (str): Version hash id. 
Returns: @@ -186,8 +187,9 @@ class VRayProxyLoader(load.LoaderPlugin): """ self.log.debug( "Looking for abc in published representations of this version.") - project_name = get_current_project_name() - abc_rep = get_representation_by_name(project_name, "abc", version_id) + abc_rep = ayon_api.get_representation_by_name( + project_name, "abc", version_id + ) if abc_rep: self.log.debug("Found, we'll link alembic to vray proxy.") file_name = get_representation_path(abc_rep) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py index 04ccf57808..ea3215da97 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_vrayscene.py @@ -17,8 +17,8 @@ from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type class VRaySceneLoader(load.LoaderPlugin): """Load Vray scene""" - families = ["vrayscene_layer"] - representations = ["vrscene"] + product_types = {"vrayscene_layer"} + representations = {"vrscene"} label = "Import VRay Scene" order = -10 @@ -26,15 +26,12 @@ class VRaySceneLoader(load.LoaderPlugin): color = "orange" def load(self, context, name, namespace, data): - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "vrayscene_layer" + product_type = context["product"]["productType"] - asset_name = context['asset']["name"] + folder_name = context["folder"]["name"] namespace = namespace or unique_namespace( - asset_name + "_", - prefix="_" if asset_name[0].isdigit() else "", + folder_name + "_", + prefix="_" if folder_name[0].isdigit() else "", suffix="_", ) @@ -71,7 +68,7 @@ class VRaySceneLoader(load.LoaderPlugin): context=context, loader=self.__class__.__name__) - def update(self, container, representation): + def update(self, container, context): node = container['objectName'] assert cmds.objExists(node), "Missing container" @@ -80,7 +77,8 @@ class VRaySceneLoader(load.LoaderPlugin): vraymeshes = cmds.ls(members, type="VRayScene") assert vraymeshes, "Cannot find VRayScene in container" - filename = get_representation_path(representation) + repre_entity = context["representation"] + filename = get_representation_path(repre_entity) for vray_mesh in vraymeshes: cmds.setAttr("{}.FilePath".format(vray_mesh), @@ -89,7 +87,7 @@ class VRaySceneLoader(load.LoaderPlugin): # Update metadata cmds.setAttr("{}.representation".format(node), - str(representation["_id"]), + repre_entity["id"], type="string") def remove(self, container): @@ -109,8 +107,8 @@ class VRaySceneLoader(load.LoaderPlugin): self.log.warning("Namespace not deleted because it " "still has members: %s", namespace) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def create_vray_scene(self, name, filename): """Re-create the structure created by VRay to support vrscenes diff --git a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py index 4c38835350..e2664439b0 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_xgen.py @@ -20,8 +20,8 @@ from ayon_core.pipeline import get_representation_path class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): """Load Xgen as reference""" - families = ["xgen"] - representations = ["ma", "mb"] + product_types = {"xgen"} + representations = {"ma", 
"mb"} label = "Reference Xgen" icon = "code-fork" @@ -113,7 +113,7 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): ) cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True) - def update(self, container, representation): + def update(self, container, context): """Workflow for updating Xgen. - Export changes to delta file. @@ -147,7 +147,8 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): self.set_palette_attributes(xgen_palette, xgen_file, xgd_file) - maya_file = get_representation_path(representation) + repre_entity = context["representation"] + maya_file = get_representation_path(repre_entity) _, extension = os.path.splitext(maya_file) new_xgen_file = maya_file.replace(extension, ".xgen") data_path = "" @@ -173,7 +174,7 @@ class XgenLoader(ayon_core.hosts.maya.api.plugin.ReferenceLoader): "{}.xgExportAsDelta".format(xgen_palette): False } with attribute_values(attribute_data): - super().update(container, representation) + super().update(container, context) xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py index 372727d400..4ca9ae9d03 100644 --- a/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_cache.py @@ -12,6 +12,7 @@ from ayon_core.pipeline import ( get_representation_path ) from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import create_yeti_variable from ayon_core.hosts.maya.api.pipeline import containerise from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type @@ -23,8 +24,19 @@ SKIP_UPDATE_ATTRS = { "viewportDensity", "viewportWidth", "viewportLength", + "renderDensity", + "renderWidth", + "renderLength", + "increaseRenderBounds" } +SKIP_ATTR_MESSAGE = ( + "Skipping updating %s.%s to %s because it " + "is considered a local overridable attribute. " + "Either set manually or the load the cache " + "anew." 
+) + def set_attribute(node, attr, value): """Wrapper of set attribute which ignores None values""" @@ -36,8 +48,8 @@ def set_attribute(node, attr, value): class YetiCacheLoader(load.LoaderPlugin): """Load Yeti Cache with one or more Yeti nodes""" - families = ["yeticache", "yetiRig"] - representations = ["fur"] + product_types = {"yeticache", "yetiRig"} + representations = {"fur"} label = "Load Yeti Cache" order = -9 @@ -56,15 +68,12 @@ class YetiCacheLoader(load.LoaderPlugin): """ - try: - product_type = context["representation"]["context"]["family"] - except ValueError: - product_type = "yeticache" + product_type = context["product"]["productType"] # Build namespace - asset = context["asset"] + folder_name = context["folder"]["name"] if namespace is None: - namespace = self.create_namespace(asset["name"]) + namespace = self.create_namespace(folder_name) # Ensure Yeti is loaded if not cmds.pluginInfo("pgYetiMaya", query=True, loaded=True): @@ -122,12 +131,12 @@ class YetiCacheLoader(load.LoaderPlugin): cmds.namespace(removeNamespace=namespace, deleteNamespaceContent=True) - def update(self, container, representation): - + def update(self, container, context): + repre_entity = context["representation"] namespace = container["namespace"] container_node = container["objectName"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) settings = self.read_settings(path) # Collect scene information of asset @@ -212,26 +221,48 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in node_settings["attrs"].items(): if attr in SKIP_UPDATE_ATTRS: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) continue set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + was_value_set = create_yeti_variable( + yeti_shape_node=yeti_node, + attr_name=attr, + value=value, + # We do not want to update the + # value if it already exists so + # that any local overrides that + # may have been applied still + # persist + force_value=False + ) + if not was_value_set: + self.log.info( + SKIP_ATTR_MESSAGE, yeti_node, attr, value + ) + cmds.setAttr("{}.representation".format(container_node), - str(representation["_id"]), + repre_entity["id"], typ="string") - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) # helper functions - def create_namespace(self, asset): + def create_namespace(self, folder_name): """Create a unique namespace Args: asset (dict): asset information """ - asset_name = "{}_".format(asset) - prefix = "_" if asset_name[0].isdigit()else "" + asset_name = "{}_".format(folder_name) + prefix = "_" if asset_name[0].isdigit() else "" namespace = lib.unique_namespace( asset_name, prefix=prefix, @@ -335,6 +366,13 @@ class YetiCacheLoader(load.LoaderPlugin): for attr, value in attributes.items(): set_attribute(attr, value, yeti_node) + # Set up user defined attributes + user_variables = node_settings.get("user_variables", {}) + for attr, value in user_variables.items(): + create_yeti_variable(yeti_shape_node=yeti_node, + attr_name=attr, + value=value) + # Connect to the time node cmds.connectAttr("time1.outTime", "%s.currentTime" % yeti_node) diff --git a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py index 310c943198..7444566ee1 100644 --- 
a/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py
+++ b/client/ayon_core/hosts/maya/plugins/load/load_yeti_rig.py
@@ -1,20 +1,28 @@
+from typing import List
+
 import maya.cmds as cmds
 
 from ayon_core.hosts.maya.api import plugin
 from ayon_core.hosts.maya.api import lib
 
+from ayon_core.pipeline import registered_host
+from ayon_core.pipeline.create import CreateContext
+
 
 class YetiRigLoader(plugin.ReferenceLoader):
     """This loader will load Yeti rig."""
 
-    families = ["yetiRig"]
-    representations = ["ma"]
+    product_types = {"yetiRig"}
+    representations = {"ma"}
 
     label = "Load Yeti Rig"
     order = -9
     icon = "code-fork"
     color = "orange"
 
+    # From settings
+    create_cache_instance_on_load = True
+
     def process_reference(
         self, context, name=None, namespace=None, options=None
     ):
@@ -49,4 +57,41 @@ class YetiRigLoader(plugin.ReferenceLoader):
         )
         self[:] = nodes
 
+        if self.create_cache_instance_on_load:
+            # Automatically create an instance to allow publishing the loaded
+            # yeti rig into a yeti cache
+            self._create_yeti_cache_instance(nodes, variant=namespace)
+
         return nodes
+
+    def _create_yeti_cache_instance(self, nodes: List[str], variant: str):
+        """Create a yeticache product type instance to publish the output.
+
+        This is similar to how loading an animation rig automatically creates
+        an animation instance for publishing the loaded character rigs, but
+        here for yeti rigs.
+
+        Args:
+            nodes (List[str]): Nodes generated on load.
+            variant (str): Variant for the yeti cache instance to create.
+
+        """
+
+        # Find the pgYetiMaya nodes amongst the loaded nodes
+        yeti_nodes = cmds.ls(nodes, type="pgYetiMaya", long=True)
+        assert yeti_nodes, "No pgYetiMaya nodes in rig, this is a bug."
+
+        self.log.info("Creating variant: {}".format(variant))
+
+        creator_identifier = "io.openpype.creators.maya.yeticache"
+
+        host = registered_host()
+        create_context = CreateContext(host)
+
+        with lib.maintained_selection():
+            cmds.select(yeti_nodes, noExpand=True)
+            create_context.create(
+                creator_identifier=creator_identifier,
+                variant=variant,
+                pre_create_data={"use_selection": True}
+            )
diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py
index 2d621353e6..0db89bee31 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/collect_arnold_scene_source.py
@@ -46,11 +46,18 @@ class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
         self.log.debug("data: {}".format(instance.data))
 
     def get_hierarchy(self, nodes):
-        """Return nodes with all their children"""
+        """Return nodes with all their children.
+ + Arguments: + nodes (List[str]): List of nodes to collect children hierarchy for + + Returns: + list: Input nodes with their children hierarchy + + """ nodes = cmds.ls(nodes, long=True) if not nodes: return [] - children = get_all_children(nodes) - # Make sure nodes merged with children only - # contains unique entries - return list(set(nodes + children)) + + children = get_all_children(nodes, ignore_intermediate_objects=True) + return list(children.union(nodes)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py index 94fcc834e1..60853bd1ee 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_file_dependencies.py @@ -1,5 +1,3 @@ -import json - from maya import cmds import pyblish.api @@ -11,18 +9,24 @@ class CollectFileDependencies(pyblish.api.ContextPlugin): label = "Collect File Dependencies" order = pyblish.api.CollectorOrder - 0.49 hosts = ["maya"] + families = ["renderlayer"] + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if not used for deadline submission anyway + settings = project_settings["deadline"]["publish"]["MayaSubmitDeadline"] # noqa + cls.enabled = settings.get("asset_dependencies", True) def process(self, context): - dependencies = [] + dependencies = set() for node in cmds.ls(type="file"): path = cmds.getAttr("{}.{}".format(node, "fileTextureName")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) for node in cmds.ls(type="AlembicNode"): path = cmds.getAttr("{}.{}".format(node, "abc_File")) if path not in dependencies: - dependencies.append(path) + dependencies.add(path) - context.data["fileDependencies"] = dependencies - self.log.debug(json.dumps(dependencies, indent=4)) + context.data["fileDependencies"] = list(dependencies) diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_inputs.py b/client/ayon_core/hosts/maya/plugins/publish/collect_inputs.py index d0b1029a03..fa5a694a76 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_inputs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_inputs.py @@ -79,12 +79,12 @@ def iter_history(nodes, def collect_input_containers(containers, nodes): """Collect containers that contain any of the node in `nodes`. - This will return any loaded Avalon container that contains at least one of - the nodes. As such, the Avalon container is an input for it. Or in short, + This will return any loaded AYON container that contains at least one of + the nodes. As such, the AYON container is an input for it. Or in short, there are member nodes of that container. Returns: - list: Input avalon containers + list: Input loaded containers """ # Assume the containers have collected their cached '_members' data @@ -172,7 +172,7 @@ class CollectUpstreamInputs(pyblish.api.InstancePlugin): """Collects inputs from nodes in renderlayer, incl. 
shaders + camera""" # Get the renderlayer - renderlayer = instance.data.get("renderlayer") + renderlayer = instance.data.get("setMembers") if renderlayer == "defaultRenderLayer": # Assume all loaded containers in the scene are inputs diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py index 85be15bb7b..774c217cfd 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_instances.py @@ -48,15 +48,15 @@ class CollectNewInstances(pyblish.api.InstancePlugin): # Collect members members = cmds.ls(members, long=True) or [] + # Collect full hierarchy dag_members = cmds.ls(members, type="dagNode", long=True) - children = get_all_children(dag_members) - children = cmds.ls(children, noIntermediate=True, long=True) - parents = ( - self.get_all_parents(members) - if creator_attributes.get("includeParentHierarchy", True) - else [] - ) - members_hierarchy = list(set(members + children + parents)) + children = get_all_children(dag_members, + ignore_intermediate_objects=True) + + members_hierarchy = set(members) + members_hierarchy.update(children) + if creator_attributes.get("includeParentHierarchy", True): + members_hierarchy.update(self.get_all_parents(dag_members)) instance[:] = members_hierarchy @@ -97,16 +97,16 @@ class CollectNewInstances(pyblish.api.InstancePlugin): """Get all parents by using string operations (optimization) Args: - nodes (list): the nodes which are found in the objectSet + nodes (iterable): the nodes which are found in the objectSet Returns: - list + set """ - parents = [] + parents = set() for node in nodes: splitted = node.split("|") items = ["|".join(splitted[0:i]) for i in range(2, len(splitted))] - parents.extend(items) + parents.update(items) - return list(set(parents)) + return parents diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_look.py b/client/ayon_core/hosts/maya/plugins/publish/collect_look.py index 00e1855b19..a3a32bc0cb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_look.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_look.py @@ -8,7 +8,7 @@ from maya import cmds # noqa import pyblish.api from ayon_core.hosts.maya.api import lib -SHAPE_ATTRS = ["castsShadows", +SHAPE_ATTRS = {"castsShadows", "receiveShadows", "motionBlur", "primaryVisibility", @@ -16,8 +16,7 @@ SHAPE_ATTRS = ["castsShadows", "visibleInReflections", "visibleInRefractions", "doubleSided", - "opposite"] -SHAPE_ATTRS = set(SHAPE_ATTRS) + "opposite"} def get_pxr_multitexture_file_attrs(node): @@ -59,9 +58,8 @@ for node_type in list(FILE_NODES.keys()): if node_type not in all_node_types: FILE_NODES.pop(node_type) -for node_type in RENDER_SET_TYPES: - if node_type not in all_node_types: - RENDER_SET_TYPES.remove(node_type) +RENDER_SET_TYPES = [node_type for node_type in RENDER_SET_TYPES + if node_type in all_node_types] del all_node_types # Cache pixar dependency node types so we can perform a type lookup against it @@ -109,8 +107,7 @@ def get_look_attrs(node): if cmds.objectType(node, isAType="shape"): attrs = cmds.listAttr(node, changedSinceFileOpen=True) or [] for attr in attrs: - if attr in SHAPE_ATTRS or \ - attr not in SHAPE_ATTRS and attr.startswith('ai'): + if attr in SHAPE_ATTRS or attr.startswith('ai'): result.append(attr) return result @@ -290,7 +287,6 @@ class CollectLook(pyblish.api.InstancePlugin): families = ["look"] label = "Collect Look" hosts = ["maya"] - maketx = True 
def process(self, instance): """Collect the Look in the instance with the correct layer settings""" @@ -302,15 +298,12 @@ class CollectLook(pyblish.api.InstancePlugin): """Collect looks. Args: - instance: Instance to collect. + instance (pyblish.api.Instance): Instance to collect. """ self.log.debug("Looking for look associations " "for %s" % instance.data['name']) - # Lookup set (optimization) - instance_lookup = set(cmds.ls(instance, long=True)) - # Discover related object sets self.log.debug("Gathering sets ...") sets = self.collect_sets(instance) @@ -351,75 +344,15 @@ class CollectLook(pyblish.api.InstancePlugin): # Collect file nodes used by shading engines (if we have any) files = [] look_sets = list(sets.keys()) - shader_attrs = [ - "surfaceShader", - "volumeShader", - "displacementShader", - "aiSurfaceShader", - "aiVolumeShader", - "rman__surface", - "rman__displacement" - ] if look_sets: self.log.debug("Found look sets: {}".format(look_sets)) - - # Get all material attrs for all look sets to retrieve their inputs - existing_attrs = [] - for look in look_sets: - for attr in shader_attrs: - if cmds.attributeQuery(attr, node=look, exists=True): - existing_attrs.append("{}.{}".format(look, attr)) - - materials = cmds.listConnections(existing_attrs, - source=True, - destination=False) or [] - - self.log.debug("Found materials:\n{}".format(materials)) - - self.log.debug("Found the following sets:\n{}".format(look_sets)) - # Get the entire node chain of the look sets - # history = cmds.listHistory(look_sets, allConnections=True) - # if materials list is empty, listHistory() will crash with - # RuntimeError - history = set() - if materials: - history = set( - cmds.listHistory(materials, allConnections=True)) - - # Since we retrieved history only of the connected materials - # connected to the look sets above we now add direct history - # for some of the look sets directly - # handling render attribute sets - - # Maya (at least 2024) crashes with Warning when render set type - # isn't available. cmds.ls() will return empty list - if RENDER_SET_TYPES: - render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES) - if render_sets: - history.update( - cmds.listHistory(render_sets, - future=False, - pruneDagObjects=True) - or [] - ) - - # Ensure unique entries only - history = list(history) - - files = cmds.ls(history, - # It's important only node types are passed that - # exist (e.g. for loaded plugins) because otherwise - # the result will turn back empty - type=list(FILE_NODES.keys()), - long=True) - - # Sort for log readability - files.sort() + files = self.collect_file_nodes(look_sets) self.log.debug("Collected file nodes:\n{}".format(files)) - # Collect textures if any file nodes are found + + # Collect texture resources if any file nodes are found resources = [] - for node in files: # sort for log readability + for node in files: resources.extend(self.collect_resources(node)) instance.data["resources"] = resources self.log.debug("Collected resources: {}".format(resources)) @@ -439,6 +372,78 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.debug("Collected look for %s" % instance) + def collect_file_nodes(self, look_sets): + """Get the entire node chain of the look sets and return file nodes + + Arguments: + look_sets (List[str]): List of sets and shading engines relevant + to the look. + + Returns: + List[str]: List of file node names. 
+ + """ + + shader_attrs = [ + "surfaceShader", + "volumeShader", + "displacementShader", + "aiSurfaceShader", + "aiVolumeShader", + "rman__surface", + "rman__displacement" + ] + + # Get all material attrs for all look sets to retrieve their inputs + existing_attrs = [] + for look_set in look_sets: + for attr in shader_attrs: + if cmds.attributeQuery(attr, node=look_set, exists=True): + existing_attrs.append("{}.{}".format(look_set, attr)) + + materials = cmds.listConnections(existing_attrs, + source=True, + destination=False) or [] + + self.log.debug("Found materials:\n{}".format(materials)) + + # Get the entire node chain of the look sets + # history = cmds.listHistory(look_sets, allConnections=True) + # if materials list is empty, listHistory() will crash with + # RuntimeError + history = set() + if materials: + history.update(cmds.listHistory(materials, allConnections=True)) + + # Since we retrieved history only of the connected materials connected + # to the look sets above we now add direct history for some of the + # look sets directly handling render attribute sets + + # Maya (at least 2024) crashes with Warning when render set type + # isn't available. cmds.ls() will return empty list + if RENDER_SET_TYPES: + render_sets = cmds.ls(look_sets, type=RENDER_SET_TYPES) + if render_sets: + history.update( + cmds.listHistory(render_sets, + future=False, + pruneDagObjects=True) + or [] + ) + + # Get file nodes in the material history + files = cmds.ls(list(history), + # It's important only node types are passed that + # exist (e.g. for loaded plugins) because otherwise + # the result will turn back empty + type=list(FILE_NODES.keys()), + long=True) + + # Sort for log readability + files.sort() + + return files + def collect_sets(self, instance): """Collect all objectSets which are of importance for publishing @@ -446,7 +451,8 @@ class CollectLook(pyblish.api.InstancePlugin): which need to be Args: - instance (list): all nodes to be published + instance (pyblish.api.Instance): publish instance containing all + nodes to be published. Returns: dict @@ -624,7 +630,7 @@ class CollectLook(pyblish.api.InstancePlugin): "source": source, # required for resources "files": files, "color_space": color_space - } # required for resources + } class CollectModelRenderSets(CollectLook): @@ -639,13 +645,13 @@ class CollectModelRenderSets(CollectLook): families = ["model"] label = "Collect Model Render Sets" hosts = ["maya"] - maketx = True def collect_sets(self, instance): """Collect all related objectSets except shadingEngines Args: - instance (list): all nodes to be published + instance (pyblish.api.Instance): publish instance containing all + nodes to be published. 
Returns: dict @@ -661,7 +667,7 @@ class CollectModelRenderSets(CollectLook): if objset in sets: continue - if "shadingEngine" in cmds.nodeType(objset, inherited=True): + if cmds.objectType(objset, isAType="shadingEngine"): continue sets[objset] = {"uuid": lib.get_id(objset), "members": list()} diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_multiverse_look.py b/client/ayon_core/hosts/maya/plugins/publish/collect_multiverse_look.py index 31c0d0eaa1..83e743c92e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_multiverse_look.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_multiverse_look.py @@ -40,9 +40,11 @@ class _NodeTypeAttrib(object): return "{}.{}".format(node, self.colour_space) def __str__(self): - return "_NodeTypeAttrib(name={}, fname={}, " - "computed_fname={}, colour_space={})".format( - self.name, self.fname, self.computed_fname, self.colour_space) + return ( + "_NodeTypeAttrib(name={}, fname={}, " + "computed_fname={}, colour_space={})".format( + self.name, self.fname, self.computed_fname, self.colour_space) + ) NODETYPES = { diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py index 19e0c133c4..21095935a2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_render.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_render.py @@ -1,24 +1,19 @@ # -*- coding: utf-8 -*- """Collect render data. -This collector will go through render layers in maya and prepare all data -needed to create instances and their representations for submission and -publishing on farm. +This collector will go through renderlayer instances and prepare all data +needed to detect the expected rendered files for a layer, with resolution, +frame ranges and collects the data needed for publishing on the farm. Requires: instance -> families - instance -> setMembers - instance -> folderPath context -> currentFile - context -> workspaceDir context -> user -Optional: - Provides: instance -> label - instance -> productName + instance -> subset instance -> attachTo instance -> setMembers instance -> publish @@ -26,6 +21,8 @@ Provides: instance -> frameEnd instance -> byFrameStep instance -> renderer + instance -> family + instance -> asset instance -> time instance -> author instance -> source @@ -71,14 +68,11 @@ class CollectMayaRender(pyblish.api.InstancePlugin): # TODO: Re-add force enable of workfile instance? # TODO: Re-add legacy layer support with LAYER_ prefix but in Creator - # TODO: Set and collect active state of RenderLayer in Creator using - # renderlayer.isRenderable() context = instance.context layer = instance.data["transientData"]["layer"] objset = instance.data.get("instance_node") filepath = context.data["currentFile"].replace("\\", "/") - workspace = context.data["workspaceDir"] # check if layer is renderable if not layer.isRenderable(): @@ -113,7 +107,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin): except UnsupportedRendererException as exc: raise KnownPublishError(exc) render_products = layer_render_products.layer_data.products - assert render_products, "no render products generated" + if not render_products: + self.log.error( + "No render products generated for '%s'. 
You might not have " + "any render camera in the renderlayer or render end frame is " + "lower than start frame.", + instance.name + ) expected_files = [] multipart = False for product in render_products: @@ -131,16 +131,21 @@ class CollectMayaRender(pyblish.api.InstancePlugin): }) has_cameras = any(product.camera for product in render_products) - assert has_cameras, "No render cameras found." - - self.log.debug("multipart: {}".format( - multipart)) - assert expected_files, "no file names were generated, this is a bug" - self.log.debug( - "expected files: {}".format( - json.dumps(expected_files, indent=4, sort_keys=True) + if render_products and not has_cameras: + self.log.error( + "No render cameras found for: %s", + instance ) - ) + if not expected_files: + self.log.warning( + "No file names were generated, this is a bug.") + + for render_product in render_products: + self.log.debug(render_product) + self.log.debug("multipart: {}".format(multipart)) + self.log.debug("expected files: {}".format( + json.dumps(expected_files, indent=4, sort_keys=True) + )) # if we want to attach render to product, check if we have AOV's # in expectedFiles. If so, raise error as we cannot attach AOV @@ -152,14 +157,14 @@ class CollectMayaRender(pyblish.api.InstancePlugin): ) # append full path - aov_dict = {} image_directory = os.path.join( cmds.workspace(query=True, rootDirectory=True), cmds.workspace(fileRuleEntry="images") ) # replace relative paths with absolute. Render products are # returned as list of dictionaries. - publish_meta_path = None + publish_meta_path = "NOT-SET" + aov_dict = {} for aov in expected_files: full_paths = [] aov_first_key = list(aov.keys())[0] @@ -170,14 +175,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin): publish_meta_path = os.path.dirname(full_path) aov_dict[aov_first_key] = full_paths full_exp_files = [aov_dict] - self.log.debug(full_exp_files) - - if publish_meta_path is None: - raise KnownPublishError("Unable to detect any expected output " - "images for: {}. Make sure you have a " - "renderable camera and a valid frame " - "range set for your renderlayer." 
- "".format(instance.name)) frame_start_render = int(self.get_render_attribute( "startFrame", layer=layer_name)) @@ -223,7 +220,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin): common_publish_meta_path = "/" + common_publish_meta_path self.log.debug( - "Publish meta path: {}".format(common_publish_meta_path)) + "Publish meta path: {}".format(common_publish_meta_path) + ) # Get layer specific settings, might be overrides colorspace_data = lib.get_color_management_preferences() @@ -290,8 +288,8 @@ class CollectMayaRender(pyblish.api.InstancePlugin): "colorspaceView": colorspace_data["view"], } - rr_settings = context.data["project_settings"]["royalrender"] - if rr_settings["enabled"]: + manager = context.data["ayonAddonsManager"] + if manager.get_enabled_addon("royalrender") is not None: data["rrPathName"] = instance.data.get("rrPathName") self.log.debug(data["rrPathName"]) @@ -314,7 +312,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin): if not extend_frames: instance.data["overrideExistingFrame"] = False - # Update the instace + # Update the instance instance.data.update(data) @staticmethod diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_review.py b/client/ayon_core/hosts/maya/plugins/publish/collect_review.py index 58d02294c5..4e35b3bcc2 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_review.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_review.py @@ -1,8 +1,8 @@ from maya import cmds, mel +import ayon_api import pyblish.api -from ayon_core.client import get_subset_by_name from ayon_core.pipeline import KnownPublishError from ayon_core.hosts.maya.api import lib @@ -67,7 +67,7 @@ class CollectReview(pyblish.api.InstancePlugin): reviewable_product = reviewable_products[0] self.log.debug( - "Subset attached to review: {}".format(reviewable_product) + "Product attached to review: {}".format(reviewable_product) ) # Find the relevant publishing instance in the current context @@ -117,16 +117,16 @@ class CollectReview(pyblish.api.InstancePlugin): else: project_name = instance.context.data["projectName"] - asset_doc = instance.context.data['assetEntity'] + folder_entity = instance.context.data["folderEntity"] task = instance.context.data["task"] legacy_product_name = task + 'Review' - subset_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, legacy_product_name, - asset_doc["_id"], - fields=["_id"] + folder_entity["id"], + fields={"id"} ) - if subset_doc: + if product_entity: self.log.debug("Existing products found, keep legacy name.") instance.data["productName"] = legacy_product_name diff --git a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py index 067a7bc532..e1755e4212 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/collect_yeti_cache.py @@ -3,6 +3,7 @@ from maya import cmds import pyblish.api from ayon_core.hosts.maya.api import lib +from ayon_core.hosts.maya.api.yeti import get_yeti_user_variables SETTINGS = { @@ -34,7 +35,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): - "increaseRenderBounds" - "imageSearchPath" - Other information is the name of the transform and it's Colorbleed ID + Other information is the name of the transform and its `cbId` """ order = pyblish.api.CollectorOrder + 0.45 @@ -54,6 +55,16 @@ class CollectYetiCache(pyblish.api.InstancePlugin): # Get specific node attributes attr_data = {} for attr 
in SETTINGS: + # Ignore non-existing attributes with a warning, e.g. cbId + # if they have not been generated yet + if not cmds.attributeQuery(attr, node=shape, exists=True): + self.log.warning( + "Attribute '{}' not found on Yeti node: {}".format( + attr, shape + ) + ) + continue + current = cmds.getAttr("%s.%s" % (shape, attr)) # change None to empty string as Maya doesn't support # NoneType in attributes @@ -61,6 +72,12 @@ class CollectYetiCache(pyblish.api.InstancePlugin): current = "" attr_data[attr] = current + # Get user variable attributes + user_variable_attrs = { + attr: lib.get_attribute("{}.{}".format(shape, attr)) + for attr in get_yeti_user_variables(shape) + } + # Get transform data parent = cmds.listRelatives(shape, parent=True)[0] transform_data = {"name": parent, "cbId": lib.get_id(parent)} @@ -70,6 +87,7 @@ class CollectYetiCache(pyblish.api.InstancePlugin): "name": shape, "cbId": lib.get_id(shape), "attrs": attr_data, + "user_variables": user_variable_attrs } settings["nodes"].append(shape_data) diff --git a/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py b/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py index 47fb4f03fe..5b597f2707 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py +++ b/client/ayon_core/hosts/maya/plugins/publish/determine_future_version.py @@ -5,7 +5,7 @@ class DetermineFutureVersion(pyblish.api.InstancePlugin): """ This will determine version of product if we want render to be attached to. """ - label = "Determine Subset Version" + label = "Determine Product Version" order = pyblish.api.IntegratorOrder hosts = ["maya"] families = ["renderlayer"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py index c4af2914cd..cb3951ec0c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_camera_mayaScene.py @@ -299,4 +299,10 @@ def transfer_image_planes(source_cameras, target_cameras, def _attach_image_plane(camera, image_plane): cmds.imagePlane(image_plane, edit=True, detach=True) + + # Attaching to a camera resets it to identity size, so we counter that + size_x = cmds.getAttr(f"{image_plane}.sizeX") + size_y = cmds.getAttr(f"{image_plane}.sizeY") cmds.imagePlane(image_plane, edit=True, camera=camera) + cmds.setAttr(f"{image_plane}.sizeX", size_x) + cmds.setAttr(f"{image_plane}.sizeY", size_y) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py index 19825b769c..4b293b5785 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_gpu_cache.py @@ -5,7 +5,8 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractGPUCache(publish.Extractor): +class ExtractGPUCache(publish.Extractor, + publish.OptionalPyblishPluginMixin): """Extract the content of the instance to a GPU cache file.""" label = "GPU Cache" @@ -20,6 +21,9 @@ class ExtractGPUCache(publish.Extractor): useBaseTessellation = True def process(self, instance): + if not self.is_active(instance.data): + return + cmds.loadPlugin("gpuCache", quiet=True) staging_dir = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py b/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py index 
441610b749..b025a1605a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_layout.py @@ -4,8 +4,8 @@ import json from maya import cmds from maya.api import OpenMaya as om +from ayon_api import get_representation_by_id -from ayon_core.client import get_representation_by_id from ayon_core.pipeline import publish @@ -44,6 +44,8 @@ class ExtractLayout(publish.Extractor): grp_loaded_ass = instance.data.get("groupLoadedAssets", False) if grp_loaded_ass: asset_list = cmds.listRelatives(asset, children=True) + # WARNING: this overrides the 'asset' variable from the parent loop + # - is that intended? for asset in asset_list: grp_name = asset.split(':')[0] else: @@ -61,13 +63,18 @@ class ExtractLayout(publish.Extractor): representation = get_representation_by_id( project_name, representation_id, - fields=["parent", "context.family"] + fields={"versionId", "context"} ) self.log.debug(representation) - version_id = representation.get("parent") - product_type = representation.get("context").get("family") + version_id = representation["versionId"] + # TODO use product entity to get product type rather than + # data in representation 'context' + repre_context = representation["context"] + product_type = repre_context.get("product", {}).get("type") + if not product_type: + product_type = repre_context.get("family") json_element = { "product_type": product_type, diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_look.py b/client/ayon_core/hosts/maya/plugins/publish/extract_look.py index 469608100d..2a86b20131 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_look.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_look.py @@ -1,16 +1,17 @@ # -*- coding: utf-8 -*- """Maya look extractor.""" +import os import sys -from abc import ABCMeta, abstractmethod -from collections import OrderedDict import contextlib import json import logging -import os import tempfile +import platform +from abc import ABCMeta, abstractmethod +from collections import OrderedDict + import six import attr - import pyblish.api from maya import cmds # noqa @@ -105,10 +106,10 @@ class TextureProcessor: self.log = log def apply_settings(self, project_settings): - """Apply OpenPype system/project settings to the TextureProcessor + """Apply AYON system/project settings to the TextureProcessor Args: - project_settings (dict): OpenPype project settings + project_settings (dict): AYON project settings Returns: None @@ -277,7 +278,7 @@ class MakeTX(TextureProcessor): """Process the texture. This function requires the `maketx` executable to be available in an - OpenImageIO toolset detectable by OpenPype. + OpenImageIO toolset detectable by AYON. Args: source (str): Path to source file.
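For context on the MakeTX processor above: a minimal, hypothetical sketch of what such a conversion can look like when OpenImageIO's `maketx` executable is on PATH. The helper name and the chosen flags are illustrative assumptions, not the plugin's actual invocation:

import subprocess

def convert_to_tx(source, destination, maketx="maketx"):
    """Convert a texture to a tiled, mip-mapped .tx file.

    Hypothetical helper; ``maketx`` must resolve to OpenImageIO's
    maketx executable, e.g. by being available on PATH.
    """
    cmd = [
        maketx,
        "--oiio",           # favor OpenImageIO-optimized output
        "-o", destination,  # output .tx path
        source,             # input texture, e.g. an .exr or .png
    ]
    # Raises CalledProcessError if maketx exits non-zero
    subprocess.run(cmd, check=True)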
diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py index f2187063fc..5de72f7674 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_pointcache.py @@ -26,6 +26,10 @@ class ExtractAlembic(publish.Extractor): families = ["pointcache", "model", "vrayproxy.alembic"] targets = ["local", "remote"] + # From settings + bake_attributes = [] + bake_attribute_prefixes = [] + def process(self, instance): if instance.data.get("farm"): self.log.debug("Should be processed on farm, skipping.") @@ -40,10 +44,12 @@ class ExtractAlembic(publish.Extractor): attrs = instance.data.get("attr", "").split(";") attrs = [value for value in attrs if value.strip()] attrs += instance.data.get("userDefinedAttributes", []) + attrs += self.bake_attributes attrs += ["cbId"] attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] + attr_prefixes += self.bake_attribute_prefixes self.log.debug("Extracting pointcache..") dirname = self.staging_dir(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py index 9286869c60..66dd805437 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_redshift_proxy.py @@ -5,7 +5,13 @@ import os from maya import cmds from ayon_core.pipeline import publish -from ayon_core.hosts.maya.api.lib import maintained_selection +from ayon_core.hosts.maya.api.lib import ( + maintained_selection, + renderlayer +) +from ayon_core.hosts.maya.api.render_setup_tools import ( + allow_export_from_render_setup_layer +) class ExtractRedshiftProxy(publish.Extractor): @@ -18,6 +24,9 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): """Extractor entry point.""" + # Make sure Redshift is loaded + cmds.loadPlugin("redshift4maya", quiet=True) + staging_dir = self.staging_dir(instance) file_name = "{}.rs".format(instance.name) file_path = os.path.join(staging_dir, file_name) @@ -60,14 +69,22 @@ class ExtractRedshiftProxy(publish.Extractor): # Write out rs file self.log.debug("Writing: '%s'" % file_path) + + # Allow overriding what renderlayer to export from. By default force + # it to the default render layer. 
(Note that the renderlayer isn't + # currently exposed as an attribute to artists) + layer = instance.data.get("renderLayer", "defaultRenderLayer") + with maintained_selection(): - cmds.select(instance.data["setMembers"], noExpand=True) - cmds.file(file_path, - pr=False, - force=True, - type="Redshift Proxy", - exportSelected=True, - options=rs_options) + with renderlayer(layer): + with allow_export_from_render_setup_layer(): + cmds.select(instance.data["setMembers"], noExpand=True) + cmds.file(file_path, + preserveReferences=False, + force=True, + type="Redshift Proxy", + exportSelected=True, + options=rs_options) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 8b88bfb9f8..1a389f3d33 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Create Unreal Skeletal Mesh data to be extracted as FBX.""" import os -from contextlib import contextmanager from maya import cmds # noqa diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py index edbb5f845e..6292afcf41 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -74,7 +74,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor): renamed_to_extract.append("|".join(node_path)) with renamed(original_parent, parent_node): - self.log.debug("Extracting: {}".format(renamed_to_extract, path)) + self.log.debug("Extracting: {}".format(renamed_to_extract)) fbx_exporter.export(renamed_to_extract, path) if "representations" not in instance.data: diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py index 9a264959d1..9a6b4ebaed 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_unreal_yeticache.py @@ -5,13 +5,13 @@ from maya import cmds from ayon_core.pipeline import publish -class ExtractYetiCache(publish.Extractor): +class ExtractUnrealYetiCache(publish.Extractor): """Producing Yeti cache files using scene time range. This will extract Yeti cache file sequence and fur settings. 
""" - label = "Extract Yeti Cache" + label = "Extract Yeti Cache (Unreal)" hosts = ["maya"] families = ["yeticacheUE"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py index 9aaba532b2..d799486184 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_workfile_xgen.py @@ -7,7 +7,6 @@ from maya import cmds import pyblish.api from ayon_core.hosts.maya.api.lib import extract_alembic from ayon_core.pipeline import publish -from ayon_core.lib import StringTemplate class ExtractWorkfileXgen(publish.Extractor): @@ -128,9 +127,11 @@ class ExtractWorkfileXgen(publish.Extractor): alembic_files.append(alembic_file) template_data = copy.deepcopy(instance.data["anatomyData"]) - published_maya_path = StringTemplate( - instance.context.data["anatomy"].templates["publish"]["file"] - ).format(template_data) + anatomy = instance.context.data["anatomy"] + publish_template = anatomy.get_template_item( + "publish", "default", "file" + ) + published_maya_path = publish_template.format(template_data) published_basename, _ = os.path.splitext(published_maya_path) for source in alembic_files: diff --git a/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py index ee864bd89b..b672089a63 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/extract_xgen.py @@ -9,7 +9,6 @@ from ayon_core.pipeline import publish from ayon_core.hosts.maya.api.lib import ( maintained_selection, attribute_values, write_xgen_file, delete_after ) -from ayon_core.lib import StringTemplate class ExtractXgen(publish.Extractor): @@ -39,8 +38,9 @@ class ExtractXgen(publish.Extractor): # Get published xgen file name. template_data = copy.deepcopy(instance.data["anatomyData"]) template_data.update({"ext": "xgen"}) - templates = instance.context.data["anatomy"].templates["publish"] - xgen_filename = StringTemplate(templates["file"]).format(template_data) + anatomy = instance.context.data["anatomy"] + file_template = anatomy.get_template_item("publish", "default", "file") + xgen_filename = file_template.format(template_data) xgen_path = os.path.join( self.staging_dir(instance), xgen_filename diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml new file mode 100644 index 0000000000..a855dd90a5 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_animation_out_set_related_node_ids.xml @@ -0,0 +1,29 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected where the (deformed) mesh has a different `cbId` than +the same mesh in its deformation history. +Theses should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For **animation** instances using the +repair action usually is usually the correct fix. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. 
Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history. + + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml new file mode 100644 index 0000000000..5aec3009a7 --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_mesh_non_manifold.xml @@ -0,0 +1,33 @@ + + + +Non-Manifold Edges/Vertices +## Non-Manifold Edges/Vertices + +Meshes found with non-manifold edges or vertices. + +### How to repair? + +Run the _select invalid_ action to select the invalid components. + +You can also try the _cleanup matching polygons_ action which will perform a +cleanup like Maya's `Mesh > Cleanup...` modeling tool. + +It is recommended to always select the invalid components first to see where +the issue is, because after running any repair you will need to double-check +that the topology still looks the way you intended. + + + +### What is non-manifold topology? + +_Non-manifold topology_ polygons have a configuration that cannot be unfolded +into a continuous flat piece, for example: + +- Three or more faces share an edge +- Two or more faces share a single vertex but no edge. +- Adjacent faces have opposite normals + + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml b/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml new file mode 100644 index 0000000000..374b8e59ae --- /dev/null +++ b/client/ayon_core/hosts/maya/plugins/publish/help/validate_rig_out_set_node_ids.xml @@ -0,0 +1,32 @@ + + + +Shape IDs mismatch original shape +## Shapes mismatch IDs with original shape + +Meshes are detected in the **rig** where the (deformed) mesh has a different +`cbId` than the same mesh in its deformation history. +These should normally be the same. + +### How to repair? + +By using the repair action the IDs from the shape in history will be +copied to the deformed shape. For rig instances, in many cases the +correct fix is to use the repair action **unless** you explicitly tried +to update the `cbId` values on the meshes - in that case you actually want +to do the reverse and copy the IDs from the deformed mesh to the history +mesh instead. + + + +### How does this happen? + +When a deformer is applied in the scene on a referenced mesh that had no +deformers then Maya will create a new shape node for the mesh that +does not have the original id. Then on scene save new ids get created for the +meshes lacking a `cbId` and thus the mesh then has a different `cbId` than +the mesh in the deformation history.
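As an aside to the non-manifold help page added above: a hedged sketch of how such meshes can be found through Maya's Python API. `cmds.polyInfo` exposes `nonManifoldEdges`/`nonManifoldVertices` queries per Maya's documentation; the helper below is illustrative and not necessarily the validator's exact implementation:

from maya import cmds

def get_non_manifold_meshes(nodes):
    """Return meshes that contain non-manifold edges or vertices.

    Hypothetical helper for illustration only.
    """
    invalid = []
    for mesh in cmds.ls(nodes, type="mesh", long=True):
        # polyInfo returns the offending components, or None when clean
        if (cmds.polyInfo(mesh, nonManifoldVertices=True)
                or cmds.polyInfo(mesh, nonManifoldEdges=True)):
            invalid.append(mesh)
    return invalid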
+ + + + diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animated_reference.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animated_reference.py index b7f115b38f..2ba2bff6fc 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animated_reference.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animated_reference.py @@ -2,12 +2,14 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( PublishValidationError, - ValidateContentsOrder + ValidateContentsOrder, + OptionalPyblishPluginMixin ) from maya import cmds -class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin): +class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate all nodes in skeletonAnim_SET are referenced""" order = ValidateContentsOrder @@ -16,8 +18,11 @@ class ValidateAnimatedReferenceRig(pyblish.api.InstancePlugin): label = "Animated Reference Rig" accepted_controllers = ["transform", "locator"] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): + if not self.is_active(instance.data): + return animated_sets = instance.data.get("animated_skeleton", []) if not animated_sets: self.log.debug( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py index 8cf5c4278e..ea989bbcf3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_content.py @@ -2,11 +2,13 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( PublishValidationError, - ValidateContentsOrder + ValidateContentsOrder, + OptionalPyblishPluginMixin ) -class ValidateAnimationContent(pyblish.api.InstancePlugin): +class ValidateAnimationContent(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Adheres to the content of 'animation' product type - Must have collected `out_hierarchy` data. 
@@ -19,6 +21,7 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin): families = ["animation"] label = "Animation Content" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False @classmethod def get_invalid(cls, instance): @@ -48,6 +51,8 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin): return invalid def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 0adb0a201c..7ecd602662 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -6,11 +6,15 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishXmlValidationError, + OptionalPyblishPluginMixin, + get_plugin_settings, + apply_plugin_settings_automatically ) -class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): +class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate if deformed shapes have related IDs to the original shapes When a deformer is applied in the scene on a referenced mesh that already @@ -28,48 +32,63 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction ] + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="maya") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return def process(self, instance): """Process all meshes""" - + if not self.is_active(instance.data): + return # Ensure all nodes have a cbId and a related ID to the original shapes # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - # TODO: Message formatting can be improved - raise PublishValidationError("Nodes found with mismatching " - "IDs: {0}".format(invalid), - title="Invalid node ids") + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) + ) @classmethod def get_invalid(cls, instance): """Get all nodes which do not match the criteria""" invalid = [] - types_to_skip = ["locator"] + types = ["mesh", "nurbsCurve", "nurbsSurface"] # get asset id nodes = instance.data.get("out_hierarchy", instance[:]) - for node in nodes: + for node in cmds.ls(nodes, type=types, long=True): # We only check when the node is *not* referenced if cmds.referenceQuery(node, isNodeReferenced=True): continue - # Check if node is a shape as deformers only work on shapes - obj_type = cmds.objectType(node, isAType="shape") - if not obj_type: - continue - - # Skip specific types - if cmds.objectType(node) in types_to_skip: 
- continue - # Get the current id of the node node_id = lib.get_id(node) - if not node_id: - invalid.append(node) - continue history_id = lib.get_id_from_sibling(node) if history_id is not None and node_id != history_id: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py index f50fa1ed41..a9d896952d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_arnold_scene_source_cbid.py @@ -1,11 +1,15 @@ import pyblish.api from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( - ValidateContentsOrder, PublishValidationError, RepairAction + ValidateContentsOrder, + PublishValidationError, + RepairAction, + OptionalPyblishPluginMixin ) -class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin): +class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate Arnold Scene Source Cbid. It is required for the proxy and content nodes to share the same cbid. @@ -16,6 +20,14 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin): families = ["ass"] label = "Validate Arnold Scene Source CBID" actions = [RepairAction] + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return @staticmethod def _get_nodes_by_name(nodes): @@ -55,6 +67,8 @@ class ValidateArnoldSceneSourceCbid(pyblish.api.InstancePlugin): return invalid_couples def process(self, instance): + if not self.is_active(instance.data): + return # Proxy validation. 
if not instance.data.get("proxy", []): return diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_ass_relative_paths.py b/client/ayon_core/hosts/maya/plugins/publish/validate_ass_relative_paths.py index 669708d3a6..6e65eee592 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_ass_relative_paths.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_ass_relative_paths.py @@ -8,11 +8,13 @@ import pyblish.api from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateAssRelativePaths(pyblish.api.InstancePlugin): +class ValidateAssRelativePaths(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure exporting ass file has set relative texture paths""" order = ValidateContentsOrder @@ -20,8 +22,11 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin): families = ['ass'] label = "ASS has relative texture paths" actions = [RepairAction] + optional = False def process(self, instance): + if not self.is_active(instance.data): + return # we cannot ask this until user open render settings as # `defaultArnoldRenderOptions` doesn't exist errors = [] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_name.py b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_name.py index 03fa0fd779..c829f4bf74 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_name.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_name.py @@ -2,11 +2,13 @@ import pyblish.api import maya.cmds as cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateAssemblyName(pyblish.api.InstancePlugin): +class ValidateAssemblyName(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """ Ensure Assembly name ends with `GRP` Check if assembly name ends with `_GRP` string. 
@@ -17,6 +19,7 @@ class ValidateAssemblyName(pyblish.api.InstancePlugin): families = ["assembly"] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] active = False + optional = True @classmethod def get_invalid(cls, instance): @@ -47,7 +50,8 @@ class ValidateAssemblyName(pyblish.api.InstancePlugin): return invalid def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError("Found {} invalid named assembly " diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_namespaces.py b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_namespaces.py index 2d3d8e71ac..814a8295c4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_namespaces.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_namespaces.py @@ -1,10 +1,12 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin): +class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure namespaces are not nested In the outliner an item in a normal namespace looks as following: @@ -20,9 +22,11 @@ class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder families = ["assembly"] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return self.log.debug("Checking namespace for %s" % instance.name) if self.get_invalid(instance): raise PublishValidationError("Nested namespaces found") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_transforms.py b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_transforms.py index 5069feb4b6..3bcae5de49 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_transforms.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_assembly_transforms.py @@ -2,10 +2,15 @@ import pyblish.api from maya import cmds import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import PublishValidationError, RepairAction +from ayon_core.pipeline.publish import ( + PublishValidationError, + RepairAction, + OptionalPyblishPluginMixin +) -class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): +class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Verify only root nodes of the loaded asset have transformations. Note: This check is temporary and is subject to change. @@ -34,7 +39,11 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin): " This can alter the look of your scene. 
" "Are you sure you want to continue?") + optional = False + def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_camera_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_camera_attributes.py index 5e940a48a9..5fd8772a96 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_camera_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_camera_attributes.py @@ -3,10 +3,14 @@ from maya import cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( - PublishValidationError, ValidateContentsOrder) + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) -class ValidateCameraAttributes(pyblish.api.InstancePlugin): +class ValidateCameraAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates Camera has no invalid attribute keys or values. The Alembic file format does not a specific subset of attributes as such @@ -20,6 +24,7 @@ class ValidateCameraAttributes(pyblish.api.InstancePlugin): hosts = ['maya'] label = 'Camera Attributes' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = True DEFAULTS = [ ("filmFitOffset", 0.0), @@ -62,7 +67,8 @@ class ValidateCameraAttributes(pyblish.api.InstancePlugin): def process(self, instance): """Process all the nodes in the instance""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_camera_contents.py b/client/ayon_core/hosts/maya/plugins/publish/validate_camera_contents.py index 7d4c4341fd..0f14a057f9 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_camera_contents.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_camera_contents.py @@ -3,10 +3,13 @@ from maya import cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( - PublishValidationError, ValidateContentsOrder) + PublishValidationError, + ValidateContentsOrder, + OptionalPyblishPluginMixin) -class ValidateCameraContents(pyblish.api.InstancePlugin): +class ValidateCameraContents(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates Camera instance contents. A Camera instance may only hold a SINGLE camera's transform, nothing else. 
@@ -22,6 +25,7 @@ class ValidateCameraContents(pyblish.api.InstancePlugin): label = 'Camera Contents' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] validate_shapes = True + optional = False @classmethod def get_invalid(cls, instance): @@ -71,7 +75,8 @@ class ValidateCameraContents(pyblish.api.InstancePlugin): def process(self, instance): """Process all the nodes in the instance""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError("Invalid camera contents: " diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py index e69717fad0..f70b46f89e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_color_sets.py @@ -10,7 +10,7 @@ from ayon_core.pipeline.publish import ( ) -class ValidateColorSets(pyblish.api.Validator, +class ValidateColorSets(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py index 55c4973842..045e22545c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_current_renderlayer_renderable.py @@ -1,11 +1,18 @@ +import inspect + import pyblish.api from maya import cmds -from ayon_core.pipeline.publish import context_plugin_should_run +from ayon_core.pipeline.publish import ( + context_plugin_should_run, + PublishValidationError, + OptionalPyblishPluginMixin +) -class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): - """Validate if current render layer has a renderable camera +class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): + """Validate if current render layer has a renderable camera. There is a bug in Redshift which occurs when the current render layer at file open has no renderable camera. The error raised is as follows: @@ -20,15 +27,48 @@ class ValidateCurrentRenderLayerIsRenderable(pyblish.api.ContextPlugin): order = pyblish.api.ValidatorOrder hosts = ["maya"] families = ["renderlayer"] + optional = False def process(self, context): - + if not self.is_active(context.data): + return # Workaround bug pyblish-base#250 if not context_plugin_should_run(self, context): return - layer = cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True) + # This validator only makes sense when publishing renderlayer instances + # with Redshift. We skip validation if there isn't any. 
+ if not any(self.is_active_redshift_render_instance(instance) + for instance in context): + return + cameras = cmds.ls(type="camera", long=True) renderable = any(c for c in cameras if cmds.getAttr(c + ".renderable")) - assert renderable, ("Current render layer '%s' has no renderable " - "camera" % layer) + if not renderable: + layer = cmds.editRenderLayerGlobals(query=True, + currentRenderLayer=True) + raise PublishValidationError( + "Current render layer '{}' has no renderable camera".format( + layer + ), + description=inspect.getdoc(self) + ) + + @staticmethod + def is_active_redshift_render_instance(instance) -> bool: + """Return whether instance is an active renderlayer instance set to + render with Redshift renderer.""" + if not instance.data.get("active", True): + return False + + # Check this before families just because it's a faster check + if not instance.data.get("renderer") == "redshift": + return False + + families = set() + families.add(instance.data.get("family")) + families.update(instance.data.get("families", [])) + if "renderlayer" not in families: + return False + + return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py b/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py index 5c5b691f9d..5736e726e9 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_frame_range.py @@ -80,7 +80,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, return if (inst_start != frame_start_handle): errors.append("Instance start frame [ {} ] doesn't " - "match the one set on asset [ {} ]: " + "match the one set on folder [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( inst_start, frame_start_handle, @@ -89,7 +89,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, if (inst_end != frame_end_handle): errors.append("Instance end frame [ {} ] doesn't " - "match the one set on asset [ {} ]: " + "match the one set on folder [ {} ]: " "{}/{}/{}/{} (handle/start/end/handle)".format( inst_end, frame_end_handle, @@ -105,7 +105,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, for label, values in checks.items(): if values[0] != values[1]: errors.append( - "{} on instance ({}) does not match with the asset " + "{} on instance ({}) does not match with the folder " "({}).".format(label.title(), values[1], values[0]) ) @@ -119,7 +119,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin, @classmethod def repair(cls, instance): """ - Repair instance container to match asset data. + Repair instance container to match folder data. 
""" if "renderlayer" in instance.data.get("families"): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_material.py b/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_material.py index e610a8118c..3735dbb74c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_material.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_material.py @@ -6,10 +6,11 @@ from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder ) -from ayon_core.pipeline import PublishValidationError +from ayon_core.pipeline import PublishValidationError, OptionalPyblishPluginMixin -class ValidateGLSLMaterial(pyblish.api.InstancePlugin): +class ValidateGLSLMaterial(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """ Validate if the asset uses GLSL Shader """ @@ -23,6 +24,8 @@ class ValidateGLSLMaterial(pyblish.api.InstancePlugin): active = True def process(self, instance): + if not self.is_active(instance.data): + return shading_grp = self.get_material_from_shapes(instance) if not shading_grp: raise PublishValidationError("No shading group found") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_plugin.py b/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_plugin.py index e155315e4f..d783da8b5c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_plugin.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_glsl_plugin.py @@ -5,11 +5,13 @@ import pyblish.api from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateGLSLPlugin(pyblish.api.InstancePlugin): +class ValidateGLSLPlugin(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """ Validate if the asset uses GLSL Shader """ @@ -19,8 +21,11 @@ class ValidateGLSLPlugin(pyblish.api.InstancePlugin): hosts = ['maya'] label = 'maya2glTF plugin' actions = [RepairAction] + optional = False def process(self, instance): + if not self.is_active(instance.data): + return if not cmds.pluginInfo("maya2glTF", query=True, loaded=True): raise PublishValidationError("maya2glTF is not loaded") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py index 43b4f06e3f..e6f4b908bb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_in_context.py @@ -11,8 +11,6 @@ from ayon_core.pipeline.publish import ( OptionalPyblishPluginMixin ) -from maya import cmds - class ValidateInstanceInContext(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): @@ -37,22 +35,25 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, if not self.is_active(instance.data): return - asset = instance.data.get("folderPath") - context_asset = self.get_context_asset(instance) - if asset != context_asset: + folder_path = instance.data.get("folderPath") + task = instance.data.get("task") + context = self.get_context(instance) + if (folder_path, task) != context: + context_label = "{} > {}".format(*context) + instance_label = "{} > {}".format(folder_path, task) raise PublishValidationError( message=( - "Instance '{}' publishes to different asset than current " - "context: {}. Current context: {}".format( - instance.name, asset, context_asset + "Instance '{}' publishes to different context than current" + " context: {}. 
Current context: {}".format( + instance.name, instance_label, context_label ) ), description=( - "## Publishing to a different asset\n" + "## Publishing to a different context data\n" "There are publish instances present which are publishing " - "into a different asset than your current context.\n\n" + "into a different folder than your current context.\n\n" "Usually this is not what you want but there can be cases " - "where you might want to publish into another asset or " + "where you might want to publish into another folder or " "shot. If that's the case you can disable the validation " "on the instance to ignore it." ) @@ -64,14 +65,20 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin, @classmethod def repair(cls, instance): - context_asset = cls.get_context_asset(instance) - instance_node = instance.data["instance_node"] - cmds.setAttr( - "{}.folderPath".format(instance_node), - context_asset, - type="string" + context_folder_path, context_task = cls.get_context( + instance) + + create_context = instance.context.data["create_context"] + instance_id = instance.data["instance_id"] + created_instance = create_context.get_instance_by_id( + instance_id ) + created_instance["folderPath"] = context_folder_path + created_instance["task"] = context_task + create_context.save_changes() @staticmethod - def get_context_asset(instance): - return instance.context.data["folderPath"] + def get_context(instance): + """Return asset, task from publishing context data""" + context = instance.context + return context.data["folderPath"], context.data["task"] diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py index da3a194e58..df9ca0bf13 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_instance_subset.py @@ -36,18 +36,18 @@ class ValidateSubsetName(pyblish.api.InstancePlugin): ) if not isinstance(product_name, six.string_types): - raise TypeError(( + raise PublishValidationError(( "Instance product name must be string, got: {0} ({1})" ).format(product_name, type(product_name))) # Ensure is not empty product if not product_name: - raise ValueError( + raise PublishValidationError( "Instance product name is empty: {0}".format(product_name) ) # Validate product characters if not validate_name(product_name): - raise ValueError(( + raise PublishValidationError(( "Instance product name contains invalid characters: {0}" ).format(product_name)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_content.py deleted file mode 100644 index 5f57b31868..0000000000 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_content.py +++ /dev/null @@ -1,75 +0,0 @@ -import maya.cmds as cmds -import pyblish.api - -from ayon_core.hosts.maya.api import lib -from ayon_core.pipeline.publish import PublishValidationError - - -class ValidateInstancerContent(pyblish.api.InstancePlugin): - """Validates that all meshes in the instance have object IDs. - - This skips a check on intermediate objects because we consider them - not important. 
- """ - order = pyblish.api.ValidatorOrder - label = 'Instancer Content' - families = ['instancer'] - - def process(self, instance): - - error = False - members = instance.data['setMembers'] - export_members = instance.data['exactExportMembers'] - - self.log.debug("Contents {0}".format(members)) - - if not len(members) == len(cmds.ls(members, type="instancer")): - self.log.error("Instancer can only contain instancers") - error = True - - # TODO: Implement better check for particles are cached - if not cmds.ls(export_members, type="nucleus"): - self.log.error("Instancer must have a connected nucleus") - error = True - - if not cmds.ls(export_members, type="cacheFile"): - self.log.error("Instancer must be cached") - error = True - - hidden = self.check_geometry_hidden(export_members) - if not hidden: - error = True - self.log.error("Instancer input geometry must be hidden " - "the scene. Invalid: {0}".format(hidden)) - - # Ensure all in one group - parents = cmds.listRelatives(members, - allParents=True, - fullPath=True) or [] - roots = list(set(cmds.ls(parents, assemblies=True, long=True))) - if len(roots) > 1: - self.log.error("Instancer should all be contained in a single " - "group. Current roots: {0}".format(roots)) - error = True - - if error: - raise PublishValidationError( - "Instancer Content is invalid. See log.") - - def check_geometry_hidden(self, export_members): - - # Ensure all instanced geometry is hidden - shapes = cmds.ls(export_members, - dag=True, - shapes=True, - noIntermediate=True) - meshes = cmds.ls(shapes, type="mesh") - - visible = [node for node in meshes - if lib.is_visible(node, - displayLayer=False, - intermediateObject=False)] - if visible: - return False - - return True diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_frame_ranges.py b/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_frame_ranges.py deleted file mode 100644 index be6724d7e9..0000000000 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_instancer_frame_ranges.py +++ /dev/null @@ -1,167 +0,0 @@ -import os -import re - -import pyblish.api - -from ayon_core.pipeline.publish import PublishValidationError - - -def is_cache_resource(resource): - """Return whether resource is a cacheFile resource""" - required = set(["maya", "node", "cacheFile"]) - tags = resource.get("tags", []) - return required.issubset(tags) - - -def valdidate_files(files): - for f in files: - assert os.path.exists(f) - assert f.endswith(".mcx") or f.endswith(".mcc") - - return True - - -def filter_ticks(files): - tick_files = set() - ticks = set() - for path in files: - match = re.match(".+Tick([0-9]+).mcx$", os.path.basename(path)) - if match: - tick_files.add(path) - num = match.group(1) - ticks.add(int(num)) - - return tick_files, ticks - - -class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin): - """Validates all instancer particle systems are cached correctly. - - This means they should have the files/frames as required by the start-end - frame (including handles). - - This also checks the files exist and checks the "ticks" (substeps) files. 
- - """ - order = pyblish.api.ValidatorOrder - label = 'Instancer Cache Frame Ranges' - families = ['instancer'] - - @classmethod - def get_invalid(cls, instance): - - import pyseq - - start_frame = instance.data.get("frameStart", 0) - end_frame = instance.data.get("frameEnd", 0) - required = range(int(start_frame), int(end_frame) + 1) - - invalid = list() - resources = instance.data.get("resources", []) - - for resource in resources: - if not is_cache_resource(resource): - continue - - node = resource['node'] - all_files = resource['files'][:] - all_lookup = set(all_files) - - # The first file is usually the .xml description file. - xml = all_files.pop(0) - assert xml.endswith(".xml") - - # Ensure all files exist (including ticks) - # The remainder file paths should be the .mcx or .mcc files - valdidate_files(all_files) - - # Maya particle caches support substeps by saving out additional - # files that end with a Tick60.mcx, Tick120.mcx, etc. suffix. - # To avoid `pyseq` getting confused we filter those out and then - # for each file (except the last frame) check that at least all - # ticks exist. - - tick_files, ticks = filter_ticks(all_files) - if tick_files: - files = [f for f in all_files if f not in tick_files] - else: - files = all_files - - sequences = pyseq.get_sequences(files) - if len(sequences) != 1: - invalid.append(node) - cls.log.warning("More than one sequence found? " - "{0} {1}".format(node, files)) - cls.log.warning("Found caches: {0}".format(sequences)) - continue - - sequence = sequences[0] - cls.log.debug("Found sequence: {0}".format(sequence)) - - start = sequence.start() - end = sequence.end() - - if start > start_frame or end < end_frame: - invalid.append(node) - cls.log.warning("Sequence does not have enough " - "frames: {0}-{1} (requires: {2}-{3})" - "".format(start, end, - start_frame, - end_frame)) - continue - - # Ensure all frames are present - missing = set(sequence.missing()) - if missing: - required_missing = [x for x in required if x in missing] - if required_missing: - invalid.append(node) - cls.log.warning("Sequence is missing required frames: " - "{0}".format(required_missing)) - continue - - # Ensure all tick files (substep) exist for the files in the folder - # for the frames required by the time range. - if ticks: - ticks = list(sorted(ticks)) - cls.log.debug("Found ticks: {0} " - "(substeps: {1})".format(ticks, len(ticks))) - - # Check all frames except the last since we don't - # require subframes after our time range. - tick_check_frames = set(required[:-1]) - - # Check all frames - for item in sequence: - frame = item.frame - if not frame: - invalid.append(node) - cls.log.error("Path is not a frame in sequence: " - "{0}".format(item)) - continue - - # Not required for our time range - if frame not in tick_check_frames: - continue - - path = item.path - for num in ticks: - base, ext = os.path.splitext(path) - tick_file = base + "Tick{0}".format(num) + ext - if tick_file not in all_lookup: - invalid.append(node) - cls.log.warning("Tick file found that is not " - "in cache query filenames: " - "{0}".format(tick_file)) - - return invalid - - def process(self, instance): - - invalid = self.get_invalid(instance) - - if invalid: - self.log.error("Invalid nodes: {0}".format(invalid)) - raise PublishValidationError( - ("Invalid particle caches in instance. 
" - "See logs for details.")) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_loaded_plugin.py b/client/ayon_core/hosts/maya/plugins/publish/validate_loaded_plugin.py index 54a3e16111..a05920a21e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_loaded_plugin.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_loaded_plugin.py @@ -4,24 +4,27 @@ import maya.cmds as cmds from ayon_core.pipeline.publish import ( RepairContextAction, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateLoadedPlugin(pyblish.api.ContextPlugin): +class ValidateLoadedPlugin(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): """Ensure there are no unauthorized loaded plugins""" label = "Loaded Plugin" order = pyblish.api.ValidatorOrder host = ["maya"] actions = [RepairContextAction] + optional = True @classmethod def get_invalid(cls, context): invalid = [] loaded_plugin = cmds.pluginInfo(query=True, listPlugins=True) - # get variable from OpenPype settings + # get variable from AYON settings whitelist_native_plugins = cls.whitelist_native_plugins authorized_plugins = cls.authorized_plugins or [] @@ -35,7 +38,8 @@ class ValidateLoadedPlugin(pyblish.api.ContextPlugin): return invalid def process(self, context): - + if not self.is_active(context.data): + return invalid = self.get_invalid(context) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py index d8a9222c36..cfd4156124 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_default_shaders_connections.py @@ -2,7 +2,6 @@ from maya import cmds import pyblish.api from ayon_core.pipeline.publish import ( - ValidateContentsOrder, RepairContextAction, PublishValidationError ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_id_reference_edits.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_id_reference_edits.py index 1d313bdae4..7ae3b4b9b5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_id_reference_edits.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_id_reference_edits.py @@ -27,6 +27,13 @@ class ValidateLookIdReferenceEdits(pyblish.api.InstancePlugin): actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + def process(self, instance): invalid = self.get_invalid(instance) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py index 656b91216b..070974aef5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_look_shading_group.py @@ -5,11 +5,13 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateShadingEngine(pyblish.api.InstancePlugin): +class ValidateShadingEngine(pyblish.api.InstancePlugin, + 
OptionalPyblishPluginMixin): """Validate all shading engines are named after the surface material. Shading engines should be named "{surface_shader}SG" @@ -22,9 +24,12 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin): actions = [ ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction ] + optional = True # The default connections to check def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: @@ -42,10 +47,18 @@ class ValidateShadingEngine(pyblish.api.InstancePlugin): shape, destination=True, type="shadingEngine" ) or [] for shading_engine in shading_engines: - name = ( - cmds.listConnections(shading_engine + ".surfaceShader")[0] - + "SG" + materials = cmds.listConnections( + shading_engine + ".surfaceShader", + source=True, destination=False ) + if not materials: + cls.log.warning( + "Shading engine '{}' has no material connected to its " + ".surfaceShader attribute.".format(shading_engine)) + continue + + material = materials[0] # there should only ever be one input + name = material + "SG" if shading_engine != name: invalid.append(shading_engine) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py index eca27d95da..47314b64ac 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_maya_units.py @@ -3,15 +3,16 @@ import maya.cmds as cmds import pyblish.api import ayon_core.hosts.maya.api.lib as mayalib -from ayon_core.pipeline.context_tools import get_current_project_asset from ayon_core.pipeline.publish import ( RepairContextAction, ValidateSceneOrder, - PublishXmlValidationError + PublishXmlValidationError, + OptionalPyblishPluginMixin ) -class ValidateMayaUnits(pyblish.api.ContextPlugin): +class ValidateMayaUnits(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): """Check if the Maya units are set correct""" order = ValidateSceneOrder @@ -35,6 +36,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): "Maya scene {setting} must be '{required_value}'. " "Current value is '{current_value}'." 
) + optional = False @classmethod def apply_settings(cls, project_settings): @@ -52,15 +54,16 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): cls.validate_fps = settings.get("validate_fps", cls.validate_fps) def process(self, context): - + if not self.is_active(context.data): + return # Collected units linearunits = context.data.get('linearUnits') angularunits = context.data.get('angularUnits') fps = context.data.get('fps') - asset_doc = context.data["assetEntity"] - asset_fps = mayalib.convert_to_maya_fps(asset_doc["data"]["fps"]) + folder_attributes = context.data["folderEntity"]["attrib"] + folder_fps = mayalib.convert_to_maya_fps(folder_attributes["fps"]) self.log.info('Units (linear): {0}'.format(linearunits)) self.log.info('Units (angular): {0}'.format(angularunits)) @@ -91,10 +94,10 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): "current_value": angularunits }) - if self.validate_fps and fps and fps != asset_fps: + if self.validate_fps and fps and fps != folder_fps: invalid.append({ "setting": "FPS", - "required_value": asset_fps, + "required_value": folder_fps, "current_value": fps }) @@ -127,7 +130,5 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin): cls.log.debug(current_linear) cls.log.info("Setting time unit to match project") - # TODO replace query with using 'context.data["assetEntity"]' - asset_doc = get_current_project_asset() - asset_fps = asset_doc["data"]["fps"] - mayalib.set_scene_fps(asset_fps) + folder_entity = context.data["folderEntity"] + mayalib.set_scene_fps(folder_entity["attrib"]["fps"]) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py index 934cbae327..c95e1ec816 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_empty.py @@ -51,5 +51,5 @@ class ValidateMeshEmpty(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - "Meshes found in instance without any vertices: %s" % invalid + "Meshes found without any vertices: %s" % invalid ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py index e76553629f..bfb4257f23 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_lamina_faces.py @@ -2,10 +2,15 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateMeshOrder +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): +class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate meshes don't have lamina faces. Lamina faces share all of their edges. @@ -17,6 +22,17 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin): families = ['model'] label = 'Mesh Lamina Faces' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = True + + description = ( + "## Meshes with Lamina Faces\n" + "Detected meshes with lamina faces. 
Lamina faces are faces "
+        "that share all of their edges and thus are merged together on top of "
+        "each other.\n\n"
+        "### How to repair?\n"
+        "You can repair them by using Maya's modeling tool `Mesh > Cleanup..` "
+        "and select to cleanup matching polygons for lamina faces."
+    )

     @staticmethod
     def get_invalid(instance):
@@ -28,9 +44,12 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):

     def process(self, instance):
         """Process all the nodes in the instance 'objectSet'"""
+        if not self.is_active(instance.data):
+            return

         invalid = self.get_invalid(instance)
         if invalid:
-            raise ValueError("Meshes found with lamina faces: "
-                             "{0}".format(invalid))
+            raise PublishValidationError(
+                "Meshes found with lamina faces: {0}".format(invalid),
+                description=self.description)
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py
index f8dfe65b32..58d015e962 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_ngons.py
@@ -3,10 +3,15 @@ from maya import cmds
 import pyblish.api
 import ayon_core.hosts.maya.api.action
 from ayon_core.hosts.maya.api import lib
-from ayon_core.pipeline.publish import ValidateContentsOrder
+from ayon_core.pipeline.publish import (
+    ValidateContentsOrder,
+    OptionalPyblishPluginMixin,
+    PublishValidationError
+)


-class ValidateMeshNgons(pyblish.api.Validator):
+class ValidateMeshNgons(pyblish.api.InstancePlugin,
+                        OptionalPyblishPluginMixin):
     """Ensure that meshes don't have ngons

     Ngon are faces with more than 4 sides.

@@ -21,6 +26,16 @@ class ValidateMeshNgons(pyblish.api.Validator):
     families = ["model"]
     label = "Mesh ngons"
     actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
+    optional = True
+
+    description = (
+        "## Meshes with NGON Faces\n"
+        "Detected meshes with NGON faces. **NGONS** are faces "
+        "with more than four sides.\n\n"
+        "### How to repair?\n"
+        "You can repair them by using Maya's modeling tool Mesh > Cleanup.. "
+        "and select to cleanup matching polygons for n-sided faces."
+    )

     @staticmethod
     def get_invalid(instance):
@@ -30,6 +45,11 @@ class ValidateMeshNgons(pyblish.api.Validator):
         # Get all faces
         faces = ['{0}.f[*]'.format(node) for node in meshes]

+        # Skip meshes that for some reason have no faces, e.g.
empty meshes + faces = cmds.ls(faces) + if not faces: + return [] + # Filter to n-sided polygon faces (ngons) invalid = lib.polyConstraint(faces, t=0x0008, # type=face @@ -39,8 +59,11 @@ class ValidateMeshNgons(pyblish.api.Validator): def process(self, instance): """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Meshes found with n-gons" - "values: {0}".format(invalid)) + raise PublishValidationError( + "Meshes found with n-gons: {0}".format(invalid), + description=self.description) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py index 0e9147d978..bf1489f92e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_no_negative_scale.py @@ -4,7 +4,8 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateMeshOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) @@ -15,7 +16,8 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNoNegativeScale(pyblish.api.Validator): +class ValidateMeshNoNegativeScale(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure that meshes don't have a negative scale. Using negatively scaled proxies in a VRayMesh results in inverted @@ -32,6 +34,7 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): families = ['model'] label = 'Mesh No Negative Scale' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False @staticmethod def get_invalid(instance): @@ -52,7 +55,8 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator): def process(self, instance): """Process all the nodes in the instance 'objectSet'""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py index 1c7ea10a50..958707e4f4 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_non_manifold.py @@ -1,13 +1,99 @@ -from maya import cmds +from maya import cmds, mel import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateMeshOrder, - PublishValidationError + PublishXmlValidationError, + RepairAction, + OptionalPyblishPluginMixin ) +def poly_cleanup(version=4, + meshes=None, + # Version 1 + all_meshes=False, + select_only=False, + history_on=True, + quads=False, + nsided=False, + concave=False, + holed=False, + nonplanar=False, + zeroGeom=False, + zeroGeomTolerance=1e-05, + zeroEdge=False, + zeroEdgeTolerance=1e-05, + zeroMap=False, + zeroMapTolerance=1e-05, + # Version 2 + shared_uvs=False, + non_manifold=False, + # Version 3 + lamina=False, + # Version 4 + invalid_components=False): + """Wrapper around `polyCleanupArgList` mel command""" + + # Get all inputs named as `dict` to easily do conversions and formatting + values = locals() + + # Convert booleans to 1 or 0 + for key in [ + "all_meshes", + "select_only", + "history_on", + "quads", + "nsided", + "concave", + "holed", + "nonplanar", + "zeroGeom", + 
"zeroEdge", + "zeroMap", + "shared_uvs", + "non_manifold", + "lamina", + "invalid_components", + ]: + values[key] = 1 if values[key] else 0 + + cmd = ( + 'polyCleanupArgList {version} {{ ' + '"{all_meshes}",' # 0: All selectable meshes + '"{select_only}",' # 1: Only perform a selection + '"{history_on}",' # 2: Keep construction history + '"{quads}",' # 3: Check for quads polys + '"{nsided}",' # 4: Check for n-sides polys + '"{concave}",' # 5: Check for concave polys + '"{holed}",' # 6: Check for holed polys + '"{nonplanar}",' # 7: Check for non-planar polys + '"{zeroGeom}",' # 8: Check for 0 area faces + '"{zeroGeomTolerance}",' # 9: Tolerance for face areas + '"{zeroEdge}",' # 10: Check for 0 length edges + '"{zeroEdgeTolerance}",' # 11: Tolerance for edge length + '"{zeroMap}",' # 12: Check for 0 uv face area + '"{zeroMapTolerance}",' # 13: Tolerance for uv face areas + '"{shared_uvs}",' # 14: Unshare uvs that are shared + # across vertices + '"{non_manifold}",' # 15: Check for nonmanifold polys + '"{lamina}",' # 16: Check for lamina polys + '"{invalid_components}"' # 17: Remove invalid components + ' }};'.format(**values) + ) + + mel.eval("source polyCleanupArgList") + if not all_meshes and meshes: + # Allow to specify meshes to run over by selecting them + cmds.select(meshes, replace=True) + mel.eval(cmd) + + +class CleanupMatchingPolygons(RepairAction): + label = "Cleanup matching polygons" + + def _as_report_list(values, prefix="- ", suffix="\n"): """Return list as bullet point list for a report""" if not values: @@ -15,7 +101,8 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNonManifold(pyblish.api.Validator): +class ValidateMeshNonManifold(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure that meshes don't have non-manifold edges or vertices To debug the problem on the meshes you can use Maya's modeling @@ -27,7 +114,9 @@ class ValidateMeshNonManifold(pyblish.api.Validator): hosts = ['maya'] families = ['model'] label = 'Mesh Non-Manifold Edges/Vertices' - actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, + CleanupMatchingPolygons] + optional = True @staticmethod def get_invalid(instance): @@ -36,21 +125,46 @@ class ValidateMeshNonManifold(pyblish.api.Validator): invalid = [] for mesh in meshes: - if (cmds.polyInfo(mesh, nonManifoldVertices=True) or - cmds.polyInfo(mesh, nonManifoldEdges=True)): - invalid.append(mesh) + components = cmds.polyInfo(mesh, + nonManifoldVertices=True, + nonManifoldEdges=True) + if components: + invalid.extend(components) return invalid def process(self, instance): """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Meshes found with non-manifold edges/vertices:\n\n{0}".format( - _as_report_list(sorted(invalid)) - ), - title="Non-Manifold Edges/Vertices" + # Report only the meshes instead of all component indices + invalid_meshes = { + component.split(".", 1)[0] for component in invalid + } + invalid_meshes = _as_report_list(sorted(invalid_meshes)) + + raise PublishXmlValidationError( + plugin=self, + message=( + "Meshes found with non-manifold " + "edges/vertices:\n\n{0}".format(invalid_meshes) + ) ) + + @classmethod + def repair(cls, instance): + invalid_components = cls.get_invalid(instance) + if not invalid_components: + cls.log.info("No 
invalid components found to cleanup.") + return + + invalid_meshes = { + component.split(".", 1)[0] for component in invalid_components + } + poly_cleanup(meshes=list(invalid_meshes), + select_only=True, + non_manifold=True) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py index 1790a94580..76b716d01f 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_normals_unlocked.py @@ -18,7 +18,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateMeshNormalsUnlocked(pyblish.api.Validator, +class ValidateMeshNormalsUnlocked(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate all meshes in the instance have unlocked normals diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py index d55b58cd0d..70ede83f2d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_shader_connections.py @@ -5,7 +5,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) @@ -79,7 +80,8 @@ def disconnect(node_a, node_b): cmds.disconnectAttr(source, input) -class ValidateMeshShaderConnections(pyblish.api.InstancePlugin): +class ValidateMeshShaderConnections(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure mesh shading engine connections are valid. 
In some scenarios Maya keeps connections to multiple shaders even if just @@ -96,15 +98,18 @@ class ValidateMeshShaderConnections(pyblish.api.InstancePlugin): label = "Mesh Shader Connections" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] + optional = True def process(self, instance): """Process all the nodes in the instance 'objectSet'""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Shapes found with invalid shader " - "connections: {0}".format(invalid)) + raise PublishValidationError( + "Shapes found with invalid shader connections: " + "{0}".format(invalid)) @staticmethod def get_invalid(instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py index 8dbd0ca264..21697cd903 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_single_uv_set.py @@ -6,7 +6,8 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -66,7 +67,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin, if allowed: self.log.warning(message) else: - raise ValueError(message) + raise PublishValidationError(message) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py index c7f405b0cf..305a58d78e 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_uv_set_map1.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -5,7 +7,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -28,8 +31,8 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] - @staticmethod - def get_invalid(instance): + @classmethod + def get_invalid(cls, instance): meshes = cmds.ls(instance, type='mesh', long=True) @@ -39,6 +42,11 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin, # Get existing mapping of uv sets by index indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True) maps = cmds.polyUVSet(mesh, query=True, allUVSets=True) + if not indices or not maps: + cls.log.warning("Mesh has no UV set: %s", mesh) + invalid.append(mesh) + continue + mapping = dict(zip(indices, maps)) # Get the uv set at index zero. 
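The `get_invalid` hunk above guards the `map1` lookup because both `cmds.polyUVSet` queries return `None` for a mesh that has no UV sets at all. A minimal sketch of that lookup pattern, assuming only Maya's `cmds` module (the helper name is illustrative, not part of the patch):

```python
from maya import cmds


def get_first_uv_set(mesh):
    """Return the UV set at index zero, or None if the mesh has no UVs."""
    indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True)
    maps = cmds.polyUVSet(mesh, query=True, allUVSets=True)
    if not indices or not maps:
        # Both queries return None for meshes without any UV sets
        return None
    mapping = dict(zip(indices, maps))
    # The default UV set is the one at index zero
    return mapping.get(0)
```

The same defensive check is mirrored in the repair hunk below, which creates a `map1` set when none exists instead of failing on the `None` result.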
@@ -55,8 +63,14 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin,

         invalid = self.get_invalid(instance)
         if invalid:
-            raise ValueError("Meshes found without 'map1' "
-                             "UV set: {0}".format(invalid))
+
+            invalid_list = "\n".join(f"- {node}" for node in invalid)
+
+            raise PublishValidationError(
+                "Meshes found without 'map1' UV set:\n"
+                "{0}".format(invalid_list),
+                description=self.get_description()
+            )

     @classmethod
     def repair(cls, instance):
@@ -67,6 +81,12 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin,
             # Get existing mapping of uv sets by index
             indices = cmds.polyUVSet(mesh, query=True, allUVSetsIndices=True)
             maps = cmds.polyUVSet(mesh, query=True, allUVSets=True)
+            if not indices or not maps:
+                # No UV set exists at all, create a `map1` uv set
+                # This may fail silently if the mesh has no geometry at all
+                cmds.polyUVSet(mesh, create=True, uvSet="map1")
+                continue
+
             mapping = dict(zip(indices, maps))

             # Ensure there is no uv set named map1 to avoid
@@ -96,3 +116,23 @@ class ValidateMeshUVSetMap1(pyblish.api.InstancePlugin,
                        rename=True,
                        uvSet=original,
                        newUVSet="map1")
+
+    @staticmethod
+    def get_description():
+        return inspect.cleandoc("""### Mesh found without map1 uv set
+
+        A mesh must have a default UV set named `map1` to adhere to the default
+        mesh behavior of Maya meshes.
+
+        There may be meshes that:
+        - Have no UV set
+        - Have no `map1` uv set but are using a different name
+        - Have a `map1` uv set, but it's not the default (first index)
+
+
+        #### Repair
+
+        Using repair will try to make the first UV set the `map1` uv set. If no
+        UV set exists yet it will be created, otherwise the current first
+        UV set is renamed to `map1`.
+        """)
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py
index 10b5d77cf3..f0962148dc 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_mesh_vertices_have_edges.py
@@ -4,10 +4,15 @@ from maya import cmds
 import ayon_core.hosts.maya.api.action
 from ayon_core.hosts.maya.api.lib import len_flattened
 from ayon_core.pipeline.publish import (
-    PublishValidationError, RepairAction, ValidateMeshOrder)
+    PublishValidationError,
+    RepairAction,
+    ValidateMeshOrder,
+    OptionalPyblishPluginMixin
+)


-class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
+class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin,
+                                    OptionalPyblishPluginMixin):
     """Validate meshes have only vertices that are connected to edges.
Maya can have invalid geometry with vertices that have no edges or @@ -32,6 +37,7 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): label = 'Mesh Vertices Have Edges' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] + optional = True @classmethod def repair(cls, instance): @@ -72,7 +78,8 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin): return invalid def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py index b0db5e435a..bbc644c3db 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_model_content.py @@ -1,3 +1,5 @@ +import inspect + from maya import cmds import pyblish.api @@ -5,15 +7,16 @@ import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateModelContent(pyblish.api.InstancePlugin): +class ValidateModelContent(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Adheres to the content of 'model' product type - - Must have one top group. (configurable) - - Must only contain: transforms, meshes and groups + See `get_description` for more details. """ @@ -24,14 +27,18 @@ class ValidateModelContent(pyblish.api.InstancePlugin): actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] validate_top_group = True + optional = False + + allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') @classmethod def get_invalid(cls, instance): content_instance = instance.data.get("setMembers", None) if not content_instance: - cls.log.error("Instance has no nodes!") - return [instance.data["name"]] + cls.log.error("Model instance has no nodes. 
" + "It is not allowed to be empty") + return [instance.data["instance_node"]] # All children will be included in the extracted export so we also # validate *all* descendents of the set members and we skip any @@ -43,30 +50,42 @@ class ValidateModelContent(pyblish.api.InstancePlugin): content_instance = list(set(content_instance + descendants)) # Ensure only valid node types - allowed = ('mesh', 'transform', 'nurbsCurve', 'nurbsSurface', 'locator') nodes = cmds.ls(content_instance, long=True) - valid = cmds.ls(content_instance, long=True, type=allowed) + valid = cmds.ls(content_instance, long=True, type=cls.allowed) invalid = set(nodes) - set(valid) if invalid: - cls.log.error("These nodes are not allowed: %s" % invalid) + # List as bullet points + invalid_bullets = "\n".join(f"- {node}" for node in invalid) + + cls.log.error( + "These nodes are not allowed:\n{}\n\n" + "The valid node types are: {}".format( + invalid_bullets, ", ".join(cls.allowed)) + ) return list(invalid) if not valid: - cls.log.error("No valid nodes in the instance") - return True + cls.log.error( + "No valid nodes in the model instance.\n" + "The valid node types are: {}".format(", ".join(cls.allowed)) + ) + return [instance.data["instance_node"]] # Ensure it has shapes shapes = cmds.ls(valid, long=True, shapes=True) if not shapes: cls.log.error("No shapes in the model instance") - return True + return [instance.data["instance_node"]] - # Top group - top_parents = set([x.split("|")[1] for x in content_instance]) + # Ensure single top group + top_parents = {"|" + x.split("|", 2)[1] for x in content_instance} if cls.validate_top_group and len(top_parents) != 1: - cls.log.error("Must have exactly one top group") - return top_parents + cls.log.error( + "A model instance must have exactly one top group. " + "Found top groups: {}".format(", ".join(top_parents)) + ) + return list(top_parents) def _is_visible(node): """Return whether node is visible""" @@ -91,11 +110,28 @@ class ValidateModelContent(pyblish.api.InstancePlugin): return list(invalid) def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( title="Model content is invalid", - message="See log for more details" + message="Model content is invalid. See log for more details.", + description=self.get_description() ) + + @classmethod + def get_description(cls): + return inspect.cleandoc(f""" + ### Model content is invalid + + Your model instance does not adhere to the rules of a + model product type: + + - Must have at least one visible shape in it, like a mesh. + - Must have one root node. When exporting multiple meshes they + must be inside a group. + - May only contain the following node types: + {", ".join(cls.allowed)} + """) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py index 6e0719628f..bf45c0e974 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_animation.py @@ -16,7 +16,7 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateNoAnimation(pyblish.api.Validator, +class ValidateNoAnimation(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Ensure no keyframes on nodes in the Instance. 
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_default_camera.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_default_camera.py index 9977562ca3..3e21ec6e50 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_default_camera.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_default_camera.py @@ -4,7 +4,8 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) @@ -15,7 +16,8 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): +class ValidateNoDefaultCameras(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure no default (startup) cameras are in the instance. This might be unnecessary. In the past there were some issues with @@ -28,6 +30,7 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): families = ['camera'] label = "No Default Cameras" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False @staticmethod def get_invalid(instance): @@ -37,6 +40,8 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin): def process(self, instance): """Process all the cameras in the instance""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py index b9b8aa2708..f546caff2c 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_namespace.py @@ -4,7 +4,8 @@ import pyblish.api from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) import ayon_core.hosts.maya.api.action @@ -24,7 +25,8 @@ def get_namespace(node_name): return node_name.rpartition(":")[0] -class ValidateNoNamespace(pyblish.api.InstancePlugin): +class ValidateNoNamespace(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure the nodes don't have a namespace""" order = ValidateContentsOrder @@ -33,6 +35,7 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin): label = 'No Namespaces' actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] + optional = False @staticmethod def get_invalid(instance): @@ -41,14 +44,22 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin): def process(self, instance): """Process all the nodes in the instance""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: + invalid_namespaces = {get_namespace(node) for node in invalid} raise PublishValidationError( - "Namespaces found:\n\n{0}".format( - _as_report_list(sorted(invalid)) + message="Namespaces found:\n\n{0}".format( + _as_report_list(sorted(invalid_namespaces)) ), - title="Namespaces in model" + title="Namespaces in model", + description=( + "## Namespaces found in model\n" + "It is not allowed to publish a model that contains " + "namespaces." 
+ ) ) @classmethod diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py index 9899768dc0..38955fd777 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_null_transforms.py @@ -5,7 +5,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) @@ -18,26 +19,22 @@ def _as_report_list(values, prefix="- ", suffix="\n"): def has_shape_children(node): # Check if any descendants - allDescendents = cmds.listRelatives(node, - allDescendents=True, - fullPath=True) - if not allDescendents: + all_descendents = cmds.listRelatives(node, + allDescendents=True, + fullPath=True) + if not all_descendents: return False # Check if there are any shapes at all - shapes = cmds.ls(allDescendents, shapes=True) + shapes = cmds.ls(all_descendents, shapes=True, noIntermediate=True) if not shapes: return False - # Check if all descendent shapes are intermediateObjects; - # if so we consider this node a null node and return False. - if all(cmds.getAttr('{0}.intermediateObject'.format(x)) for x in shapes): - return False - return True -class ValidateNoNullTransforms(pyblish.api.InstancePlugin): +class ValidateNoNullTransforms(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure no null transforms are in the scene. Warning: @@ -54,6 +51,7 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin): label = 'No Empty/Null Transforms' actions = [RepairAction, ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False @staticmethod def get_invalid(instance): @@ -70,6 +68,8 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin): def process(self, instance): """Process all the transform nodes in the instance """ + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_no_vraymesh.py b/client/ayon_core/hosts/maya/plugins/publish/validate_no_vraymesh.py index be8296a820..2d59608e11 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_no_vraymesh.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_no_vraymesh.py @@ -1,7 +1,9 @@ import pyblish.api from maya import cmds -from ayon_core.pipeline.publish import PublishValidationError - +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) def _as_report_list(values, prefix="- ", suffix="\n"): """Return list as bullet point list for a report""" @@ -10,15 +12,18 @@ def _as_report_list(values, prefix="- ", suffix="\n"): return prefix + (suffix + prefix).join(values) -class ValidateNoVRayMesh(pyblish.api.InstancePlugin): +class ValidateNoVRayMesh(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate there are no VRayMesh objects in the instance""" order = pyblish.api.ValidatorOrder label = 'No V-Ray Proxies (VRayMesh)' families = ["pointcache"] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return if not cmds.pluginInfo("vrayformaya", query=True, loaded=True): return diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py index f40db988c6..2d6f231cb5 
100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids.py
@@ -31,6 +31,13 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin):
     actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction,
                ayon_core.hosts.maya.api.action.GenerateUUIDsOnInvalidAction]

+    @classmethod
+    def apply_settings(cls, project_settings):
+        # Disable plug-in if cbId workflow is disabled
+        if not project_settings["maya"].get("use_cbid_workflow", True):
+            cls.enabled = False
+            return
+
     def process(self, instance):
         """Process all meshes"""

@@ -53,7 +60,8 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin):
         # We do want to check the referenced nodes as it might be
         # part of the end product.
         id_nodes = lib.get_id_required_nodes(referenced_nodes=True,
-                                             nodes=instance[:])
-        invalid = [n for n in id_nodes if not lib.get_id(n)]
-
-        return invalid
+                                             nodes=instance[:],
+                                             # Exclude those with already
+                                             # existing ids
+                                             existing_ids=False)
+        return id_nodes
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py
index 912311cc8d..545ab8e28c 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_deformed_shapes.py
@@ -26,6 +26,13 @@ class ValidateNodeIdsDeformedShape(pyblish.api.InstancePlugin):
         RepairAction
     ]

+    @classmethod
+    def apply_settings(cls, project_settings):
+        # Disable plug-in if cbId workflow is disabled
+        if not project_settings["maya"].get("use_cbid_workflow", True):
+            cls.enabled = False
+            return
+
     def process(self, instance):
         """Process all the nodes in the instance"""

diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py
index de86ffe575..d679c510af 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_in_database.py
@@ -1,14 +1,14 @@
 import pyblish.api
+import ayon_api

 import ayon_core.hosts.maya.api.action
-from ayon_core.client import get_assets
 from ayon_core.hosts.maya.api import lib
 from ayon_core.pipeline.publish import (
     PublishValidationError, ValidatePipelineOrder)


 class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin):
-    """Validate if the CB Id is related to an asset in the database
+    """Validate if the CB Id is related to a folder in the database

     All nodes with the `cbId` attribute will be validated to ensure that
     the loaded asset in the scene is related to the current project.
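The hunk below adds the same `apply_settings` hook that every cbId validator in this patch receives, so the whole family switches off when a project disables the cbId workflow. Sketched in isolation, with an illustrative class name (the `use_cbid_workflow` settings key comes straight from the hunks):

```python
import pyblish.api


class ValidateCbIdExample(pyblish.api.InstancePlugin):
    """Illustrative plug-in showing the settings-driven disable hook."""

    enabled = True

    @classmethod
    def apply_settings(cls, project_settings):
        # Called once during plug-in discovery with the project settings;
        # defaulting to True keeps the plug-in enabled when the key is
        # missing from older settings.
        if not project_settings["maya"].get("use_cbid_workflow", True):
            cls.enabled = False
```

Disabling the class via `cls.enabled` removes it from the publish run entirely, which is cheaper than letting each instance short-circuit inside `process`.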
@@ -26,31 +26,38 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, ayon_core.hosts.maya.api.action.GenerateUUIDsOnInvalidAction] + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + def process(self, instance): invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - ("Found asset IDs which are not related to " - "current project in instance: `{}`").format(instance.name)) + "Found folder ids which are not related to " + "current project in instance: `{}`".format(instance.name)) @classmethod def get_invalid(cls, instance): - invalid = [] + nodes = instance[:] + if not nodes: + return # Get all id required nodes - id_required_nodes = lib.get_id_required_nodes(referenced_nodes=True, - nodes=instance[:]) + id_required_nodes = lib.get_id_required_nodes(referenced_nodes=False, + nodes=nodes) + if not id_required_nodes: + return # check ids against database ids - project_name = instance.context.data["projectName"] - asset_docs = get_assets(project_name, fields=["_id"]) - db_asset_ids = { - str(asset_doc["_id"]) - for asset_doc in asset_docs - } + folder_ids = cls.get_project_folder_ids(context=instance.context) # Get all asset IDs + invalid = [] for node in id_required_nodes: cb_id = lib.get_id(node) @@ -58,9 +65,37 @@ class ValidateNodeIdsInDatabase(pyblish.api.InstancePlugin): if not cb_id: continue - asset_id = cb_id.split(":", 1)[0] - if asset_id not in db_asset_ids: - cls.log.error("`%s` has unassociated asset ID" % node) + folder_id = cb_id.split(":", 1)[0] + if folder_id not in folder_ids: + cls.log.error("`%s` has unassociated folder id" % node) invalid.append(node) return invalid + + @classmethod + def get_project_folder_ids(cls, context): + """Return all folder ids in the current project. + + Arguments: + context (pyblish.api.Context): The publish context. + + Returns: + set[str]: All folder ids in the current project. 
+ + """ + # We query the database only for the first instance instead of + # per instance by storing a cache in the context + key = "__cache_project_folder_ids" + if key in context.data: + return context.data[key] + + # check ids against database + project_name = context.data["projectName"] + folder_entities = ayon_api.get_folders(project_name, fields={"id"}) + folder_ids = { + folder_entity["id"] + for folder_entity in folder_entities + } + + context.data[key] = folder_ids + return folder_ids diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py index b2db535fa6..17eb58f421 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_related.py @@ -1,16 +1,27 @@ +import inspect +import uuid +from collections import defaultdict import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( OptionalPyblishPluginMixin, PublishValidationError, ValidatePipelineOrder) +from ayon_api import get_folders + + +def is_valid_uuid(value) -> bool: + """Return whether value is a valid UUID""" + try: + uuid.UUID(value) + except ValueError: + return False + return True class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): - """Validate nodes have a related Colorbleed Id to the instance.data[asset] - - """ + """Validate nodes have a related `cbId` to the instance.data[folderPath]""" order = ValidatePipelineOrder label = 'Node Ids Related (ID)' @@ -23,6 +34,13 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, ayon_core.hosts.maya.api.action.GenerateUUIDsOnInvalidAction] + @classmethod + def apply_settings(cls, project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + def process(self, instance): """Process all nodes in instance (including hierarchy)""" if not self.is_active(instance.data): @@ -31,27 +49,74 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin, # Ensure all nodes have a cbId invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - ("Nodes IDs found that are not related to asset " - "'{}' : {}").format(instance.data['asset'], invalid)) + + invalid_list = "\n".join(f"- {node}" for node in sorted(invalid)) + + raise PublishValidationError(( + "Nodes IDs found that are not related to folder '{}':\n{}" + ).format(instance.data["folderPath"], invalid_list), + description=self.get_description() + ) @classmethod def get_invalid(cls, instance): """Return the member nodes that are invalid""" - invalid = list() + folder_id = instance.data["folderEntity"]["id"] - asset_id = str(instance.data['assetEntity']["_id"]) - - # We do want to check the referenced nodes as we it might be + # We do want to check the referenced nodes as it might be # part of the end product + invalid = list() + nodes_by_other_folder_ids = defaultdict(set) for node in instance: - _id = lib.get_id(node) if not _id: continue - node_asset_id = _id.split(":", 1)[0] - if node_asset_id != asset_id: + node_folder_id = _id.split(":", 1)[0] + if node_folder_id != folder_id: invalid.append(node) + nodes_by_other_folder_ids[node_folder_id].add(node) + + # Log what other assets were found. 
+        if nodes_by_other_folder_ids:
+            project_name = instance.context.data["projectName"]
+            other_folder_ids = set(nodes_by_other_folder_ids.keys())
+
+            # Remove folder ids that are not valid UUID identifiers, these
+            # may be legacy OpenPype ids
+            other_folder_ids = {folder_id for folder_id in other_folder_ids
+                                if is_valid_uuid(folder_id)}
+            if not other_folder_ids:
+                return invalid
+
+            folder_entities = get_folders(project_name=project_name,
+                                          folder_ids=other_folder_ids,
+                                          fields=["path"])
+            if folder_entities:
+                # Log names of other assets detected
+                # We disregard logging nodes/ids for asset ids where no asset
+                # was found in the database because ValidateNodeIdsInDatabase
+                # takes care of that.
+                folder_paths = {entity["path"] for entity in folder_entities}
+                cls.log.error(
+                    "Found nodes related to other folders:\n{}".format(
+                        "\n".join(f"- {path}" for path in sorted(folder_paths))
+                    )
+                )

         return invalid
+
+    @staticmethod
+    def get_description():
+        return inspect.cleandoc("""### Node IDs must match folder id
+
+        The node ids must match the folder entity id you are publishing to.
+
+        Usually this mismatch occurs if you are re-using nodes from another
+        folder or project.
+
+        #### How to repair?
+
+        The repair action will regenerate new ids for
+        the invalid nodes to match the instance's folder.
+        """)
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py
index eeede82caf..6b44a307d2 100644
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py
+++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_ids_unique.py
@@ -8,6 +8,8 @@ from ayon_core.pipeline.publish import (
 import ayon_core.hosts.maya.api.action
 from ayon_core.hosts.maya.api import lib

+from maya import cmds
+

 class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
     """Validate the nodes in the instance have a unique Colorbleed Id

@@ -26,16 +28,23 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):
     actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction,
                ayon_core.hosts.maya.api.action.GenerateUUIDsOnInvalidAction]

+    @classmethod
+    def apply_settings(cls, project_settings):
+        # Disable plug-in if cbId workflow is disabled
+        if not project_settings["maya"].get("use_cbid_workflow", True):
+            cls.enabled = False
+            return
+
     def process(self, instance):
         """Process all meshes"""

         # Ensure all nodes have a cbId
         invalid = self.get_invalid(instance)
         if invalid:
-            label = "Nodes found with non-unique asset IDs"
+            label = "Nodes found with non-unique folder ids"
             raise PublishValidationError(
-                message="{}: {}".format(label, invalid),
-                title="Non-unique asset ids on nodes",
+                message="{}, see log".format(label),
+                title="Non-unique folder ids on nodes",
                 description="{}\n- {}".format(label,
                                               "\n- ".join(sorted(invalid)))
             )
@@ -47,7 +56,6 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin):

         # Check only non intermediate shapes
         # todo: must the instance itself ensure to have no intermediates?
         # todo: how come there are intermediates?
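The remainder of this `get_invalid` hunk boils down to grouping nodes by their id and flagging every id that has more than one member. A standalone sketch of that idiom, with `get_id` standing in for `ayon_core.hosts.maya.api.lib.get_id`:

```python
from collections import defaultdict


def find_duplicate_id_members(nodes, get_id):
    """Return all nodes whose id is shared with at least one other node."""
    ids = defaultdict(set)
    for node in nodes:
        node_id = get_id(node)
        if node_id:
            ids[node_id].add(node)

    invalid = []
    for members in ids.values():
        if len(members) > 1:
            invalid.extend(members)
    return invalid
```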
- from maya import cmds instance_members = cmds.ls(instance, noIntermediate=True, long=True) # Collect each id with their members @@ -60,10 +68,14 @@ class ValidateNodeIdsUnique(pyblish.api.InstancePlugin): # Take only the ids with more than one member invalid = list() - _iteritems = getattr(ids, "iteritems", ids.items) - for _ids, members in _iteritems(): + for members in ids.values(): if len(members) > 1: - cls.log.error("ID found on multiple nodes: '%s'" % members) + members_text = "\n".join( + "- {}".format(member) for member in sorted(members) + ) + cls.log.error( + "ID found on multiple nodes:\n{}".format(members_text) + ) invalid.extend(members) return invalid diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py b/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py index 297618fd4f..10cbbc9a88 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_node_no_ghosting.py @@ -3,10 +3,15 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): +class ValidateNodeNoGhosting(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure nodes do not have ghosting enabled. If one would publish towards a non-Maya format it's likely that stats @@ -23,6 +28,7 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): families = ['model', 'rig'] label = "No Ghosting" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False _attributes = {'ghosting': 0} @@ -46,9 +52,10 @@ class ValidateNodeNoGhosting(pyblish.api.InstancePlugin): return invalid def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Nodes with ghosting enabled found: " - "{0}".format(invalid)) + raise PublishValidationError( + "Nodes with ghosting enabled found: {0}".format(invalid)) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py index fd71039e30..f961ec6e4a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_plugin_path_attributes.py @@ -8,11 +8,13 @@ from ayon_core.hosts.maya.api.lib import pairwise from ayon_core.hosts.maya.api.action import SelectInvalidAction from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidatePluginPathAttributes(pyblish.api.InstancePlugin): +class ValidatePluginPathAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """ Validate plug-in path attributes point to existing file paths. 
""" @@ -22,6 +24,7 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin): families = ["workfile"] label = "Plug-in Path Attributes" actions = [SelectInvalidAction] + optional = False # Attributes are defined in project settings attribute = [] @@ -60,6 +63,8 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin): def process(self, instance): """Process all directories Set as Filenames in Non-Maya Nodes""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py index 384d99df1a..117f7df822 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_image_rule.py @@ -5,10 +5,15 @@ import pyblish.api from maya import cmds from ayon_core.pipeline.publish import ( - PublishValidationError, RepairAction, ValidateContentsOrder) + PublishValidationError, + RepairAction, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) -class ValidateRenderImageRule(pyblish.api.InstancePlugin): +class ValidateRenderImageRule(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates Maya Workpace "images" file rule matches project settings. This validates against the configured default render image folder: @@ -22,9 +27,11 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin): hosts = ["maya"] families = ["renderlayer"] actions = [RepairAction] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return required_images_rule = os.path.normpath( self.get_default_render_image_folder(instance) ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_no_default_cameras.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_no_default_cameras.py index 32d0470b7f..41c0fa4807 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_no_default_cameras.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_render_no_default_cameras.py @@ -6,10 +6,12 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin): +class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure no default (startup) cameras are to be rendered.""" order = ValidateContentsOrder @@ -17,6 +19,7 @@ class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin): families = ['renderlayer'] label = "No Default Cameras Renderable" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False @staticmethod def get_invalid(instance): @@ -32,6 +35,8 @@ class ValidateRenderNoDefaultCameras(pyblish.api.InstancePlugin): def process(self, instance): """Process all the cameras in the instance""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py index f31059f594..e186d74b89 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py +++ 
b/client/ayon_core/hosts/maya/plugins/publish/validate_render_single_camera.py
@@ -1,4 +1,5 @@
 import re
+import inspect

 import pyblish.api
 from maya import cmds
@@ -7,11 +8,13 @@ import ayon_core.hosts.maya.api.action
 from ayon_core.hosts.maya.api.lib_rendersettings import RenderSettings
 from ayon_core.pipeline.publish import (
     ValidateContentsOrder,
-    PublishValidationError
+    PublishValidationError,
+    OptionalPyblishPluginMixin
 )


-class ValidateRenderSingleCamera(pyblish.api.InstancePlugin):
+class ValidateRenderSingleCamera(pyblish.api.InstancePlugin,
+                                 OptionalPyblishPluginMixin):
     """Validate renderable camera count for layer and token.

     Pipeline is supporting multiple renderable cameras per layer, but image
@@ -24,14 +27,20 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin):
     families = ["renderlayer", "vrayscene"]

     actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
+    optional = False

     R_CAMERA_TOKEN = re.compile(r'%c|<camera>', re.IGNORECASE)

     def process(self, instance):
         """Process all the cameras in the instance"""
+        if not self.is_active(instance.data):
+            return
         invalid = self.get_invalid(instance)
         if invalid:
-            raise PublishValidationError("Invalid cameras for render.")
+            raise PublishValidationError(
+                "Invalid render cameras.",
+                description=self.get_description()
+            )

     @classmethod
     def get_invalid(cls, instance):
@@ -46,17 +55,30 @@ class ValidateRenderSingleCamera(pyblish.api.InstancePlugin):
             RenderSettings.get_image_prefix_attr(renderer)
         )

-
+        renderlayer = instance.data["renderlayer"]
         if len(cameras) > 1:
             if re.search(cls.R_CAMERA_TOKEN, file_prefix):
                 # if there is <camera> token in prefix and we have more than
                 # 1 camera, all is ok.
                 return
-            cls.log.error("Multiple renderable cameras found for %s: %s " %
-                          (instance.data["setMembers"], cameras))
-            return [instance.data["setMembers"]] + cameras
+            cls.log.error(
+                "Multiple renderable cameras found for %s: %s ",
+                renderlayer, ", ".join(cameras))
+            return [renderlayer] + cameras

         elif len(cameras) < 1:
-            cls.log.error("No renderable cameras found for %s " %
-                          instance.data["setMembers"])
-            return [instance.data["setMembers"]]
+            cls.log.error("No renderable cameras found for %s ", renderlayer)
+            return [renderlayer]
+
+    def get_description(self):
+        return inspect.cleandoc(
+            """### Render Cameras Invalid
+
+            Your render cameras are misconfigured. You may have no render
+            camera set or have multiple cameras with a render filename
+            prefix that does not include the `<camera>` token.
+
+            See the logs for more details about the cameras.
+ + """ + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py b/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py index 900e5444a9..910e336fc1 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_renderlayer_aovs.py @@ -1,11 +1,14 @@ +import ayon_api import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.client import get_subset_by_name -from ayon_core.pipeline.publish import PublishValidationError +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) - -class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): +class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate created AOVs / RenderElement is registered in the database Each render element is registered as a product which is formatted based on @@ -26,8 +29,12 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): hosts = ["maya"] families = ["renderlayer"] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( @@ -37,11 +44,11 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): invalid = [] project_name = instance.context.data["projectName"] - asset_doc = instance.data["assetEntity"] + folder_entity = instance.data["folderEntity"] render_passes = instance.data.get("renderPasses", []) for render_pass in render_passes: is_valid = self.validate_product_registered( - project_name, asset_doc, render_pass + project_name, folder_entity, render_pass ) if not is_valid: invalid.append(render_pass) @@ -49,10 +56,10 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin): return invalid def validate_product_registered( - self, project_name, asset_doc, product_name + self, project_name, folder_entity, product_name ): - """Check if product is registered in the database under the asset""" + """Check if product is registered in the database under the folder""" - return get_subset_by_name( - project_name, product_name, asset_doc["_id"], fields=["_id"] + return ayon_api.get_product_by_name( + project_name, product_name, folder_entity["id"], fields={"id"} ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py b/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py index ff552f566d..d822dca288 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_resolution.py @@ -37,7 +37,7 @@ class ValidateResolution(pyblish.api.InstancePlugin, @classmethod def get_invalid_resolution(cls, instance): - width, height, pixelAspect = cls.get_db_resolution(instance) + width, height, pixelAspect = cls.get_folder_resolution(instance) current_renderer = instance.data["renderer"] layer = instance.data["renderlayer"] invalid = False @@ -68,7 +68,7 @@ class ValidateResolution(pyblish.api.InstancePlugin, if current_width != width or current_height != height: cls.log.error( "Render resolution {}x{} does not match " - "asset resolution {}x{}".format( + "folder resolution {}x{}".format( current_width, current_height, width, height )) @@ -76,29 +76,19 @@ class ValidateResolution(pyblish.api.InstancePlugin, if current_pixelAspect != pixelAspect: cls.log.error( "Render pixel aspect {} does not match " 
- "asset pixel aspect {}".format( + "folder pixel aspect {}".format( current_pixelAspect, pixelAspect )) invalid = True return invalid @classmethod - def get_db_resolution(cls, instance): - asset_doc = instance.data["assetEntity"] - project_doc = instance.context.data["projectEntity"] - for data in [asset_doc["data"], project_doc["data"]]: - if ( - "resolutionWidth" in data and - "resolutionHeight" in data and - "pixelAspect" in data - ): - width = data["resolutionWidth"] - height = data["resolutionHeight"] - pixelAspect = data["pixelAspect"] - return int(width), int(height), float(pixelAspect) - - # Defaults if not found in asset document or project document - return 1920, 1080, 1.0 + def get_folder_resolution(cls, instance): + task_attributes = instance.data["taskEntity"]["attrib"] + width = task_attributes["resolutionWidth"] + height = task_attributes["resolutionHeight"] + pixel_aspect = task_attributes["pixelAspect"] + return int(width), int(height), float(pixel_aspect) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_contents.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_contents.py index be495a8fb9..f05e2b7311 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_contents.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_contents.py @@ -3,11 +3,13 @@ from maya import cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( PublishValidationError, - ValidateContentsOrder + ValidateContentsOrder, + OptionalPyblishPluginMixin ) -class ValidateRigContents(pyblish.api.InstancePlugin): +class ValidateRigContents(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure rig contains pipeline-critical content Every rig must contain at least two object sets: @@ -21,11 +23,14 @@ class ValidateRigContents(pyblish.api.InstancePlugin): hosts = ["maya"] families = ["rig"] action = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = True accepted_output = ["mesh", "transform"] accepted_controllers = ["transform"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( @@ -87,9 +92,9 @@ class ValidateRigContents(pyblish.api.InstancePlugin): """Validate missing objectsets in rig sets Args: - instance (str): instance - required_objsets (list): list of objectset names - rig_sets (list): list of rig sets + instance (pyblish.api.Instance): instance + required_objsets (list[str]): list of objectset names + rig_sets (list[str]): list of rig sets Raises: PublishValidationError: When the error is raised, it will show @@ -109,15 +114,15 @@ class ValidateRigContents(pyblish.api.InstancePlugin): Check if all rig set members are within the hierarchy of the rig root Args: - instance (str): instance - content (list): list of content from rig sets + instance (pyblish.api.Instance): instance + content (list[str]): list of content from rig sets Raises: PublishValidationError: It means no dag nodes in the rig instance Returns: - list: invalid hierarchy + List[str]: invalid hierarchy """ # Ensure there are at least some transforms or dag nodes # in the rig instance @@ -140,15 +145,13 @@ class ValidateRigContents(pyblish.api.InstancePlugin): @classmethod def validate_geometry(cls, set_members): - """ - Checks if the node types of the set members valid + """Checks if the node types of the set members valid Args: - set_members: list of nodes of the controls_set - 
hierarchy: list of nodes which reside under the root node + set_members (list[str]): nodes of the out_set Returns: - errors (list) + list[str]: Nodes of invalid types. """ # Validate all shape types @@ -162,18 +165,17 @@ class ValidateRigContents(pyblish.api.InstancePlugin): if cmds.nodeType(shape) not in cls.accepted_output: invalid.append(shape) + return invalid + @classmethod def validate_controls(cls, set_members): - """ - Checks if the control set members are allowed node types. - Checks if the node types of the set members valid + """Checks if the node types of the set members are valid for controls. Args: - set_members: list of nodes of the controls_set - hierarchy: list of nodes which reside under the root node + set_members (list[str]): list of nodes of the controls_set Returns: - errors (list) + list: Controls of disallowed node types. """ # Validate control types @@ -189,7 +191,7 @@ class ValidateRigContents(pyblish.api.InstancePlugin): """Get the target objectsets and rig sets nodes Args: - instance (str): instance + instance (pyblish.api.Instance): instance Returns: tuple: 2-tuple of list of objectsets, @@ -213,6 +215,7 @@ class ValidateSkeletonRigContents(ValidateRigContents): label = "Skeleton Rig Contents" hosts = ["maya"] families = ["rig.fbx"] + optional = True @classmethod def get_invalid(cls, instance): @@ -247,11 +250,10 @@ class ValidateSkeletonRigContents(ValidateRigContents): """Get the target objectsets and rig sets nodes Args: - instance (str): instance + instance (pyblish.api.Instance): instance Returns: - tuple: 2-tuple of list of objectsets, - list of rig sets nodes + tuple: 2-tuple of list of objectsets, list of rig sets nodes """ objectsets = ["skeletonMesh_SET"] skeleton_mesh_nodes = instance.data.get("skeleton_mesh", []) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers.py index 469412dd1a..814ff50177 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers.py @@ -5,13 +5,15 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) import ayon_core.hosts.maya.api.action from ayon_core.hosts.maya.api.lib import undo_chunk -class ValidateRigControllers(pyblish.api.InstancePlugin): +class ValidateRigControllers(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate rig controllers. 
Controls must have the transformation attributes on their default @@ -33,6 +35,7 @@ class ValidateRigControllers(pyblish.api.InstancePlugin): label = "Rig Controllers" hosts = ["maya"] families = ["rig"] + optional = True actions = [RepairAction, ayon_core.hosts.maya.api.action.SelectInvalidAction] @@ -50,6 +53,9 @@ class ValidateRigControllers(pyblish.api.InstancePlugin): } def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py index 2227899a5b..ea2de81036 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_controllers_arnold_attributes.py @@ -5,14 +5,16 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) from ayon_core.hosts.maya.api import lib import ayon_core.hosts.maya.api.action -class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): +class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate rig control curves have no keyable arnold attributes. The Arnold plug-in will create curve attributes like: @@ -35,6 +37,7 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): label = "Rig Controllers (Arnold Attributes)" hosts = ["maya"] families = ["rig"] + optional = False actions = [RepairAction, ayon_core.hosts.maya.api.action.SelectInvalidAction] @@ -48,6 +51,9 @@ class ValidateRigControllersArnoldAttributes(pyblish.api.InstancePlugin): ] def process(self, instance): + if not self.is_active(instance.data): + return + invalid = self.get_invalid(instance) if invalid: raise PublishValidationError('{} failed, see log ' diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py index bb5ec8353e..78cc3f5938 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_joints_hidden.py @@ -7,11 +7,13 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateRigJointsHidden(pyblish.api.InstancePlugin): +class ValidateRigJointsHidden(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate all joints are hidden visually. 
This includes being hidden: @@ -28,6 +30,7 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): label = "Joints Hidden" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction] + optional = True @staticmethod def get_invalid(instance): @@ -36,6 +39,8 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin): def process(self, instance): """Process all the nodes in the instance 'objectSet'""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py index dccf9cc47b..d94ddc5f2a 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_out_set_node_ids.py @@ -7,11 +7,15 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( RepairAction, ValidateContentsOrder, - PublishValidationError + PublishXmlValidationError, + OptionalPyblishPluginMixin, + get_plugin_settings, + apply_plugin_settings_automatically ) -class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): +class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate if deformed shapes have related IDs to the original shapes. When a deformer is applied in the scene on a referenced mesh that already @@ -30,16 +34,44 @@ class ValidateRigOutSetNodeIds(pyblish.api.InstancePlugin): RepairAction ] allow_history_only = False + optional = False + + @classmethod + def apply_settings(cls, project_settings): + # Preserve automatic settings applying logic + settings = get_plugin_settings(plugin=cls, + project_settings=project_settings, + log=cls.log, + category="maya") + apply_plugin_settings_automatically(cls, settings, logger=cls.log) + + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return def process(self, instance): """Process all meshes""" - + if not self.is_active(instance.data): + return # Ensure all nodes have a cbId and a related ID to the original shapes # if a deformer has been created on the shape invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError( - "Nodes found with mismatching IDs: {0}".format(invalid) + + # Use the short names + invalid = cmds.ls(invalid) + invalid.sort() + + # Construct a human-readable list + invalid = "\n".join("- {}".format(node) for node in invalid) + + raise PublishXmlValidationError( + plugin=ValidateRigOutSetNodeIds, + message=( + "Rig nodes have different IDs than their input " + "history: \n{0}".format(invalid) + ) ) @classmethod @@ -114,6 +146,7 @@ class ValidateSkeletonRigOutSetNodeIds(ValidateRigOutSetNodeIds): families = ["rig.fbx"] hosts = ['maya'] label = 'Skeleton Rig Out Set Node Ids' + optional = False @classmethod def get_node(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_output_ids.py b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_output_ids.py index 93552ccce0..d04006f013 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_rig_output_ids.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_rig_output_ids.py @@ -32,6 +32,13 @@ class ValidateRigOutputIds(pyblish.api.InstancePlugin): actions = [RepairAction, ayon_core.hosts.maya.api.action.SelectInvalidAction] + @classmethod + def apply_settings(cls, 
project_settings): + # Disable plug-in if cbId workflow is disabled + if not project_settings["maya"].get("use_cbid_workflow", True): + cls.enabled = False + return + def process(self, instance): invalid = self.get_invalid(instance, compute=True) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py index 6e68cf5d14..c7d5de2050 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_scene_set_workspace.py @@ -46,6 +46,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin): raise PublishValidationError( "Maya workspace is not set correctly.\n\n" f"Current workfile `{scene_name}` is not inside the " - "current Maya project root directory `{root_dir}`.\n\n" + f"current Maya project root directory `{root_dir}`.\n\n" "Please use Workfile app to re-save." ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py b/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py index 906f6fbd1a..f88e33fdfb 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_setdress_root.py @@ -1,5 +1,8 @@ import pyblish.api -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) class ValidateSetdressRoot(pyblish.api.InstancePlugin): @@ -20,4 +23,6 @@ class ValidateSetdressRoot(pyblish.api.InstancePlugin): root = cmds.ls(set_member, assemblies=True, long=True) if not root or root[0] not in set_member: - raise Exception("Setdress top root node is not being published.") + raise PublishValidationError( + "Setdress top root node is not being published." 
+ ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py index 86ca0ca400..09c17202c5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shader_name.py @@ -51,7 +51,7 @@ class ValidateShaderName(pyblish.api.InstancePlugin, descendants = cmds.ls(descendants, noIntermediate=True, long=True) shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True) - asset_name = instance.data.get("folderPath") + folder_path = instance.data.get("folderPath") # Check the number of connected shadingEngines per shape regex_compile = re.compile(cls.regex) @@ -71,12 +71,12 @@ class ValidateShaderName(pyblish.api.InstancePlugin, cls.log.error(error_message.format(shape, shader)) else: if 'asset' in regex_compile.groupindex: - if m.group('asset') != asset_name: + if m.group('asset') != folder_path: invalid.append(shape) message = error_message - message += " with missing asset name \"{2}\"" + message += " with missing folder path \"{2}\"" cls.log.error( - message.format(shape, shader, asset_name) + message.format(shape, shader, folder_path) ) return invalid diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py index 2f0811a73e..c4c4c909d3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_default_names.py @@ -8,7 +8,8 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -84,8 +85,8 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin, invalid = self.get_invalid(instance) if invalid: - raise ValueError("Incorrectly named shapes " - "found: {0}".format(invalid)) + raise PublishValidationError( + "Incorrectly named shapes found: {0}".format(invalid)) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py index ffdb43ef55..52ce3c5436 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_render_stats.py @@ -6,10 +6,13 @@ import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, ValidateMeshOrder, + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateShapeRenderStats(pyblish.api.Validator): +class ValidateShapeRenderStats(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Ensure all render stats are set to the default values.""" order = ValidateMeshOrder @@ -34,31 +37,50 @@ class ValidateShapeRenderStats(pyblish.api.Validator): # It seems the "surfaceShape" and those derived from it have # `renderStat` attributes. 
shapes = cmds.ls(instance, long=True, type='surfaceShape') - invalid = [] + invalid = set() for shape in shapes: - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): value = cmds.getAttr('{}.{}'.format(shape, attr)) if value != default_value: - invalid.append(shape) + invalid.add(shape) return invalid def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) + if not invalid: + return - if invalid: - raise ValueError("Shapes with non-default renderStats " - "found: {0}".format(invalid)) + defaults_str = "\n".join( + "- {}: {}\n".format(key, value) + for key, value in self.defaults.items() + ) + description = ( + "## Shape Default Render Stats\n" + "Shapes are detected with non-default render stats.\n\n" + "To ensure a model's shapes behave like a shape would by default " + "we require the render stats to have not been altered in " + "the published models.\n\n" + "### How to repair?\n" + "You can reset the default values on the shapes by using the " + "repair action." + ) + + raise PublishValidationError( + "Shapes with non-default renderStats " + "found: {0}".format(", ".join(sorted(invalid))), + description=description, + detail="The expected default values " + "are:\n\n{}".format(defaults_str) + ) @classmethod def repair(cls, instance): for shape in cls.get_invalid(instance): - _iteritems = getattr(cls.defaults, "iteritems", cls.defaults.items) - for attr, default_value in _iteritems(): - + for attr, default_value in cls.defaults.items(): if cmds.attributeQuery(attr, node=shape, exists=True): plug = '{0}.{1}'.format(shape, attr) value = cmds.getAttr(plug) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py index 6cf3edf472..6c89258085 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_shape_zero.py @@ -7,11 +7,13 @@ from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateShapeZero(pyblish.api.Validator): +class ValidateShapeZero(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Shape components may not have any "tweak" values To solve this issue, try freezing the shapes. 
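Several hunks in these files replace bare ValueError/AssertionError raises with PublishValidationError carrying extra keyword arguments, as in the render-stats change just above. For reference, a condensed sketch of that call shape; the message and values here are made up, and the keyword arguments are assumed to match the ones the hunks in this patch pass (description takes markdown rendered in the publish report, detail adds an expandable section, title overrides the plug-in label in the report):

from ayon_core.pipeline.publish import PublishValidationError

raise PublishValidationError(
    # Short summary shown in the publish report list.
    "Shapes with non-default renderStats found: pSphereShape1",
    title="Shape Default Render Stats",
    # Markdown body rendered on the validation report page.
    description=(
        "## Shape Default Render Stats\n"
        "Shapes are detected with non-default render stats.\n\n"
        "### How to repair?\n"
        "Use the repair action to reset the default values."
    ),
    # Extra details, e.g. the expected defaults.
    detail="The expected default values are:\n\n- castsShadows: 1"
)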
@@ -26,6 +28,7 @@ class ValidateShapeZero(pyblish.api.Validator): ayon_core.hosts.maya.api.action.SelectInvalidAction, RepairAction ] + optional = True @staticmethod def get_invalid(instance): @@ -65,6 +68,8 @@ class ValidateShapeZero(pyblish.api.Validator): def process(self, instance): """Process all the nodes in the instance "objectSet""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py b/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py index 1987f93e32..f5d73553d3 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_single_assembly.py @@ -1,5 +1,8 @@ import pyblish.api -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError +) class ValidateSingleAssembly(pyblish.api.InstancePlugin): @@ -30,7 +33,11 @@ class ValidateSingleAssembly(pyblish.api.InstancePlugin): # ensure unique (somehow `maya.cmds.ls` doesn't manage that) assemblies = set(assemblies) - assert len(assemblies) > 0, ( - "One assembly required for: %s (currently empty?)" % instance) - assert len(assemblies) < 2, ( - 'Multiple assemblies found: %s' % assemblies) + if len(assemblies) == 0: + raise PublishValidationError( + "One assembly required for: %s (currently empty?)" % instance + ) + elif len(assemblies) > 1: + raise PublishValidationError( + 'Multiple assemblies found: %s' % assemblies + ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/client/ayon_core/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py index ff2ad822b6..172453f1ef 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py @@ -4,20 +4,25 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, + OptionalPyblishPluginMixin ) from maya import cmds -class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin): +class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates that nodes has common root.""" order = ValidateContentsOrder hosts = ["maya"] families = ["skeletalMesh"] label = "Skeletal Mesh Top Node" + optional = False def process(self, instance): + if not self.is_active(instance.data): + return geo = instance.data.get("geometry") joints = instance.data.get("joints") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_skeleton_top_group_hierarchy.py b/client/ayon_core/hosts/maya/plugins/publish/validate_skeleton_top_group_hierarchy.py index 7c876240ae..9fbe0f440b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_skeleton_top_group_hierarchy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_skeleton_top_group_hierarchy.py @@ -21,8 +21,12 @@ class ValidateSkeletonTopGroupHierarchy(pyblish.api.InstancePlugin, order = ValidateContentsOrder + 0.05 label = "Skeleton Rig Top Group Hierarchy" families = ["rig.fbx"] + optional = True def process(self, instance): + if not self.is_active(instance.data): + return + invalid = [] skeleton_mesh_data = instance.data("skeleton_mesh", []) if skeleton_mesh_data: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py 
b/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py index c104f0477f..a548e12f33 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_skinCluster_deformer_set.py @@ -3,10 +3,15 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): +class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate skinClusters on meshes have valid member relationships. In rare cases it can happen that a mesh has a skinCluster in its history @@ -20,14 +25,19 @@ class ValidateSkinclusterDeformerSet(pyblish.api.InstancePlugin): families = ['fbx'] label = "Skincluster Deformer Relationships" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): """Process all the transform nodes in the instance""" + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Invalid skinCluster relationships " - "found on meshes: {0}".format(invalid)) + raise PublishValidationError( + "Invalid skinCluster relationships found on meshes: {0}" + .format(invalid) + ) @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py b/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py index 524c7b29ba..a276a5b644 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_step_size.py @@ -3,11 +3,13 @@ import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( PublishValidationError, - ValidateContentsOrder + ValidateContentsOrder, + OptionalPyblishPluginMixin ) -class ValidateStepSize(pyblish.api.InstancePlugin): +class ValidateStepSize(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates the step size for the instance is in a valid range. For example the `step` size should never be lower or equal to zero. 
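For context on the change repeated throughout these hunks: most validators in this patch switch from plain pyblish.api.InstancePlugin (or the deprecated pyblish.api.Validator) to a combination with OptionalPyblishPluginMixin, gain an `optional` class attribute, and guard process() with is_active(). A minimal sketch of the pattern as the hunks above and below use it; the class name, label, and families here are hypothetical:

import pyblish.api
from ayon_core.pipeline.publish import (
    OptionalPyblishPluginMixin,
    PublishValidationError
)


class ValidateExample(pyblish.api.InstancePlugin,
                      OptionalPyblishPluginMixin):
    """Hypothetical validator showing the opt-in pattern of this patch."""

    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = ["model"]
    label = "Example Validator"
    # With optional = True the publisher exposes an on/off toggle to the
    # artist; with optional = False is_active() is expected to always
    # return True, so the guard below becomes a no-op.
    optional = True

    def process(self, instance):
        # Skip quietly when the artist disabled this optional validator.
        if not self.is_active(instance.data):
            return
        invalid = []  # collect offending nodes here
        if invalid:
            raise PublishValidationError(
                "Invalid nodes found: {0}".format(invalid))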
@@ -20,14 +22,14 @@ class ValidateStepSize(pyblish.api.InstancePlugin): 'pointcache', 'animation'] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] - + optional = False MIN = 0.01 MAX = 1.0 @classmethod def get_invalid(cls, instance): - objset = instance.data['name'] + objset = instance.data['instance_node'] step = instance.data.get("step", 1.0) if step < cls.MIN or step > cls.MAX: @@ -40,8 +42,9 @@ return [] def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( - "Invalid instances found: {0}".format(invalid)) + "Instance found with invalid step size: {0}".format(invalid))
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py index ddf9d3867d..cd96ebb10d 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_transform_zero.py @@ -1,15 +1,18 @@ -from maya import cmds +import inspect +from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateTransformZero(pyblish.api.Validator): +class ValidateTransformZero(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Transforms can't have any values To solve this issue, try freezing the transforms. So long @@ -29,6 +32,7 @@ class ValidateTransformZero(pyblish.api.Validator): 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0] _tolerance = 1e-30 + optional = True @classmethod def get_invalid(cls, instance): @@ -54,7 +58,7 @@ class ValidateTransformZero(pyblish.api.Validator): if ('_LOC' in transform) or ('_loc' in transform): continue mat = cmds.xform(transform, q=1, matrix=True, objectSpace=True) - if not all(abs(x-y) < cls._tolerance + if not all(abs(x - y) < cls._tolerance for x, y in zip(cls._identity, mat)): invalid.append(transform) @@ -62,17 +66,28 @@ class ValidateTransformZero(pyblish.api.Validator): def process(self, instance): """Process all the nodes in the instance "objectSet""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - names = "\n".join( " - {}".format(node) for node in invalid ) raise PublishValidationError( title="Transform Zero", + description=self.get_description(), message="The model publish allows no transformations. You must" " freeze transformations to continue.\n\n" - "Nodes found with transform values: " + "Nodes found with transform values:\n" "{0}".format(names)) + @staticmethod + def get_description(): + return inspect.cleandoc("""### Transform can't have any values + + The model publish allows no transformations. + + You must **freeze transformations** to continue. + + """)
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py index 55f8933fff..0066d70531 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unique_names.py @@ -2,10 +2,15 @@ from maya import cmds import pyblish.api import ayon_core.hosts.maya.api.action -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateUniqueNames(pyblish.api.Validator): +class ValidateUniqueNames(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """transform names should be unique ie: using cmds.ls(someNodeName) should always return shortname @@ -17,6 +22,7 @@ families = ["model"] label = "Unique transform name" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = True @staticmethod def get_invalid(instance): @@ -32,8 +38,9 @@ def process(self, instance): """Process all the nodes in the instance "objectSet""" - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Nodes found with none unique names. " "values: {0}".format(invalid)) + raise PublishValidationError( "Nodes found with non-unique names:\n{0}".format(invalid))
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index a39ba7c4cc..6440c00eae 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -3,11 +3,16 @@ from maya import cmds import pyblish.api -from ayon_core.pipeline.publish import ValidateMeshOrder +from ayon_core.pipeline.publish import ( + ValidateMeshOrder, + OptionalPyblishPluginMixin, + PublishValidationError +) import ayon_core.hosts.maya.api.action -class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin): +class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate if mesh is made of triangles for Unreal Engine""" order = ValidateMeshOrder @@ -22,14 +27,16 @@ invalid = [] meshes = cmds.ls(instance, type="mesh", long=True) for mesh in meshes: - faces = cmds.polyEvaluate(mesh, f=True) - tris = cmds.polyEvaluate(mesh, t=True) + faces = cmds.polyEvaluate(mesh, face=True) + tris = cmds.polyEvaluate(mesh, triangle=True) if faces != tris: invalid.append(mesh) return invalid def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) - assert len(invalid) == 0, ( - "Found meshes without triangles") + if invalid: + raise PublishValidationError("Found meshes without triangles") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py index
c9860d27a0..88b0ff0e71 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_staticmesh_naming.py @@ -100,8 +100,8 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin, cl_r = re.compile(regex_collision) - asset_name = instance.data["assetEntity"]["name"] - mesh_name = "{}{}".format(asset_name, + folder_name = instance.data["folderEntity"]["name"] + mesh_name = "{}{}".format(folder_name, instance.data.get("variant", [])) for obj in collision_set: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py index ef7296e628..f7acd41cea 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_unreal_up_axis.py @@ -6,7 +6,8 @@ import pyblish.api from ayon_core.pipeline.publish import ( ValidateContentsOrder, RepairAction, - OptionalPyblishPluginMixin + OptionalPyblishPluginMixin, + PublishValidationError ) @@ -26,9 +27,10 @@ class ValidateUnrealUpAxis(pyblish.api.ContextPlugin, if not self.is_active(context.data): return - assert cmds.upAxis(q=True, axis=True) == "z", ( - "Invalid axis set as up axis" - ) + if cmds.upAxis(q=True, axis=True) != "z": + raise PublishValidationError( + "Invalid axis set as up axis" + ) @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py index 29cf9420a3..1fdb476dba 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_visible_only.py @@ -4,11 +4,13 @@ from ayon_core.hosts.maya.api.lib import iter_visible_nodes_in_range import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): +class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validates at least a single node is visible in frame range. This validation only validates if the `visibleOnly` flag is enabled @@ -20,9 +22,11 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): hosts = ["maya"] families = ["pointcache", "animation"] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return if not instance.data.get("visibleOnly", False): self.log.debug("Visible only is disabled. Validation skipped..") return @@ -30,8 +34,9 @@ class ValidateAlembicVisibleOnly(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: start, end = self.get_frame_range(instance) - raise PublishValidationError("No visible nodes found in " - "frame range {}-{}.".format(start, end)) + raise PublishValidationError( + f"No visible nodes found in frame range {start}-{end}." 
+ ) @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py index 54eaa58e74..b3978b8483 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_distributed_rendering.py @@ -3,10 +3,16 @@ from maya import cmds from ayon_core.hosts.maya.api import lib from ayon_core.pipeline.publish import ( - PublishValidationError, RepairAction, ValidateContentsOrder) + KnownPublishError, + PublishValidationError, + RepairAction, + ValidateContentsOrder, + OptionalPyblishPluginMixin +) -class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): +class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate V-Ray Distributed Rendering is ignored in batch mode. Whenever Distributed Rendering is enabled for V-Ray in the render settings @@ -20,19 +26,24 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): label = "VRay Distributed Rendering" families = ["renderlayer"] actions = [RepairAction] + optional = False # V-Ray attribute names enabled_attr = "vraySettings.sys_distributed_rendering_on" ignored_attr = "vraySettings.sys_distributed_rendering_ignore_batch" def process(self, instance): - + if not self.is_active(instance.data): + return if instance.data.get("renderer") != "vray": - # If not V-Ray ignore.. + # If not V-Ray, ignore return vray_settings = cmds.ls("vraySettings", type="VRaySettingsNode") - assert vray_settings, "Please ensure a VRay Settings Node is present" + if not vray_settings: + raise KnownPublishError( + "Please ensure a VRay Settings Node is present" + ) renderlayer = instance.data['renderlayer'] @@ -44,8 +55,8 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin): # during batch mode we invalidate the instance if not lib.get_attr_in_layer(self.ignored_attr, layer=renderlayer): raise PublishValidationError( - ("Renderlayer has distributed rendering enabled " - "but is not set to ignore in batch mode.")) + "Renderlayer has distributed rendering enabled " + "but is not set to ignore in batch mode.") @classmethod def repair(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py index d4e53d69dc..9df5fb8488 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_referenced_aovs.py @@ -4,10 +4,15 @@ import pyblish.api import types from maya import cmds -from ayon_core.pipeline.publish import RepairContextAction +from ayon_core.pipeline.publish import ( + RepairContextAction, + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): +class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate whether the V-Ray Render Elements (AOVs) include references. This will check if there are AOVs pulled from references. 
If @@ -21,9 +26,12 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): hosts = ['maya'] families = ['renderlayer'] actions = [RepairContextAction] + optional = False def process(self, instance): """Plugin main entry point.""" + if not self.is_active(instance.data): + return if instance.data.get("renderer") != "vray": # If not V-Ray ignore.. return @@ -39,7 +47,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): self.log.warning(( "Referenced AOVs are enabled in Vray " "Render Settings and are detected in scene, but " - "Pype render instance option for referenced AOVs is " + "AYON render instance option for referenced AOVs is " "disabled. Those AOVs will be rendered but not published " "by Pype." )) @@ -54,7 +62,7 @@ class ValidateVrayReferencedAOVs(pyblish.api.InstancePlugin): self.log.error(( "'Use referenced' not enabled in Vray Render Settings." )) - raise AssertionError("Invalid render settings") + raise PublishValidationError("Invalid render settings") @classmethod def repair(cls, context): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_translator_settings.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_translator_settings.py index f366ee60cf..a3d93dd9c0 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vray_translator_settings.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vray_translator_settings.py @@ -5,22 +5,27 @@ from ayon_core.pipeline.publish import ( context_plugin_should_run, RepairContextAction, ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) from maya import cmds -class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin): +class ValidateVRayTranslatorEnabled(pyblish.api.ContextPlugin, + OptionalPyblishPluginMixin): """Validate VRay Translator settings for extracting vrscenes.""" order = ValidateContentsOrder label = "VRay Translator Settings" families = ["vrayscene_layer"] actions = [RepairContextAction] + optional = False def process(self, context): """Plugin entry point.""" + if not self.is_active(context.data): + return # Workaround bug pyblish-base#250 if not context_plugin_should_run(self, context): return diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py index 7e16006f97..0288d4b865 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy.py @@ -1,31 +1,37 @@ import pyblish.api -from ayon_core.pipeline import KnownPublishError +from ayon_core.pipeline.publish import ( + OptionalPyblishPluginMixin, + PublishValidationError +) -class ValidateVrayProxy(pyblish.api.InstancePlugin): +class ValidateVrayProxy(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): order = pyblish.api.ValidatorOrder label = "VRay Proxy Settings" hosts = ["maya"] families = ["vrayproxy"] + optional = False def process(self, instance): data = instance.data - + if not self.is_active(data): + return if not data["setMembers"]: - raise KnownPublishError( - "'%s' is empty! This is a bug" % instance.name + raise PublishValidationError( + f"Instance '{instance.name}' is empty." 
) if data["animation"]: if data["frameEnd"] < data["frameStart"]: - raise KnownPublishError( + raise PublishValidationError( "End frame is smaller than start frame" ) if not data["vrmesh"] and not data["alembic"]: - raise KnownPublishError( + raise PublishValidationError( "Both vrmesh and alembic are off. Needs at least one to" " publish." ) diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy_members.py b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy_members.py index 1a52771ee6..6732d09202 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy_members.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_vrayproxy_members.py @@ -4,12 +4,14 @@ from maya import cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateVrayProxyMembers(pyblish.api.InstancePlugin): +class ValidateVrayProxyMembers(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate whether the V-Ray Proxy instance has shape members""" order = pyblish.api.ValidatorOrder @@ -17,9 +19,11 @@ class ValidateVrayProxyMembers(pyblish.api.InstancePlugin): hosts = ['maya'] families = ['vrayproxy'] actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py b/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py index e2c006be9f..7e0f01c482 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_xgen.py @@ -34,7 +34,7 @@ class ValidateXgen(pyblish.api.InstancePlugin): " Node type found: {}".format(node_type) ) - # Cant have inactive modifiers in collection cause Xgen will try and + # Can't have inactive modifiers in collection cause Xgen will try and # look for them when loading. 
palette = instance.data["xgmPalette"].replace("|", "") inactive_modifiers = {} diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py index a72d930339..086cb7b1f5 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_renderscript_callbacks.py @@ -1,10 +1,15 @@ from maya import cmds import pyblish.api -from ayon_core.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin +) -class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): +class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Check if the render script callbacks will be used during the rendering In order to ensure the render tasks are executed properly we need to check @@ -24,6 +29,7 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): label = "Yeti Render Script Callbacks" hosts = ["maya"] families = ["renderlayer"] + optional = False # Settings per renderer callbacks = { @@ -37,11 +43,12 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin): } def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise ValueError("Invalid render callbacks found for '%s'!" - % instance.name) + raise PublishValidationError( + f"Invalid render callbacks found for '{instance.name}'.") @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py index 22545d07fb..84614fc0be 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_cache_state.py @@ -1,14 +1,17 @@ +import inspect + import pyblish.api import maya.cmds as cmds import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( RepairAction, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) - -class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): +class ValidateYetiRigCacheState(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate the I/O attributes of the node Every pgYetiMaya cache node per instance should have: @@ -23,11 +26,17 @@ class ValidateYetiRigCacheState(pyblish.api.InstancePlugin): families = ["yetiRig"] actions = [RepairAction, ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: - raise PublishValidationError("Nodes have incorrect I/O settings") + raise PublishValidationError( + "Nodes have incorrect I/O settings", + description=inspect.getdoc(self) + ) @classmethod def get_invalid(cls, instance): diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py index 3d9d8faca8..77e189e37b 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_input_in_instance.py @@ -5,11 +5,13 @@ 
import pyblish.api import ayon_core.hosts.maya.api.action from ayon_core.pipeline.publish import ( ValidateContentsOrder, - PublishValidationError + PublishValidationError, + OptionalPyblishPluginMixin ) -class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator): +class ValidateYetiRigInputShapesInInstance(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate if all input nodes are part of the instance's hierarchy""" order = ValidateContentsOrder @@ -17,9 +19,11 @@ class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator): families = ["yetiRig"] label = "Yeti Rig Input Shapes In Instance" actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError("Yeti Rig has invalid input meshes") diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_settings.py b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_settings.py index 22a5ccbaca..6bd2ebb753 100644 --- a/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_settings.py +++ b/client/ayon_core/hosts/maya/plugins/publish/validate_yeti_rig_settings.py @@ -1,9 +1,13 @@ import pyblish.api -from ayon_core.pipeline.publish import PublishValidationError +from ayon_core.pipeline.publish import ( + PublishValidationError, + OptionalPyblishPluginMixin +) -class ValidateYetiRigSettings(pyblish.api.InstancePlugin): +class ValidateYetiRigSettings(pyblish.api.InstancePlugin, + OptionalPyblishPluginMixin): """Validate Yeti Rig Settings have collected input connections. The input connections are collected for the nodes in the `input_SET`. @@ -15,9 +19,11 @@ class ValidateYetiRigSettings(pyblish.api.InstancePlugin): order = pyblish.api.ValidatorOrder label = "Yeti Rig Settings" families = ["yetiRig"] + optional = False def process(self, instance): - + if not self.is_active(instance.data): + return invalid = self.get_invalid(instance) if invalid: raise PublishValidationError( diff --git a/client/ayon_core/hosts/maya/startup/userSetup.py b/client/ayon_core/hosts/maya/startup/userSetup.py index adbbfe4f44..3112e2bf12 100644 --- a/client/ayon_core/hosts/maya/startup/userSetup.py +++ b/client/ayon_core/hosts/maya/startup/userSetup.py @@ -10,7 +10,7 @@ from maya import cmds host = MayaHost() install_host(host) -print("Starting OpenPype usersetup...") +print("Starting AYON usersetup...") project_name = get_current_project_name() settings = get_project_settings(project_name) @@ -47,4 +47,4 @@ if bool(int(os.environ.get(key, "0"))): ) -print("Finished OpenPype usersetup.") +print("Finished AYON usersetup.") diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py index 0969666484..44d8dfda21 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/app.py @@ -2,10 +2,10 @@ import sys import time import logging +import ayon_api from qtpy import QtWidgets, QtCore from ayon_core import style -from ayon_core.client import get_last_version_by_subset_id from ayon_core.pipeline import get_current_project_name from ayon_core.tools.utils.lib import qt_app_context from ayon_core.hosts.maya.api.lib import ( @@ -227,9 +227,9 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): # (since assigning multiple to the same nodes makes no sense) assign_look = next( ( - subset_doc - for subset_doc 
in item["looks"] - if subset_doc["name"] in looks + product_entity + for product_entity in item["looks"] + if product_entity["name"] in looks ), None ) @@ -240,8 +240,8 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): continue # Get the latest version of this asset's look product - version = get_last_version_by_subset_id( - project_name, assign_look["_id"], fields=["_id"] + version_entity = ayon_api.get_last_version_by_product_id( + project_name, assign_look["id"], fields={"id"} ) product_name = assign_look["name"] @@ -283,7 +283,9 @@ class MayaLookAssignerWindow(QtWidgets.QWidget): # Assign look if nodes: - assign_look_by_version(nodes, version_id=version["_id"]) + assign_look_by_version( + nodes, version_id=version_entity["id"] + ) end = time.time() diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py index 810e1fc88c..a20880dffc 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/arnold_standin.py @@ -4,10 +4,11 @@ from collections import defaultdict import logging from maya import cmds +import ayon_api from ayon_core.pipeline import get_current_project_name -from ayon_core.client import get_last_version_by_subset_name from ayon_core.hosts.maya import api + from . import lib from .alembic import get_alembic_ids_cache from .usd import is_usd_lib_supported, get_usd_ids_cache @@ -136,33 +137,34 @@ def assign_look(standin, product_name): nodes_by_id = get_nodes_by_id(standin) - # Group by asset id so we run over the look per asset - node_ids_by_asset_id = defaultdict(set) + # Group by folder id so we run over the look per folder + node_ids_by_folder_id = defaultdict(set) for node_id in nodes_by_id: - asset_id = node_id.split(":", 1)[0] - node_ids_by_asset_id[asset_id].add(node_id) + folder_id = node_id.split(":", 1)[0] + node_ids_by_folder_id[folder_id].add(node_id) project_name = get_current_project_name() - for asset_id, node_ids in node_ids_by_asset_id.items(): + for folder_id, node_ids in node_ids_by_folder_id.items(): # Get latest look version - version = get_last_version_by_subset_name( + version_entity = ayon_api.get_last_version_by_product_name( project_name, - subset_name=product_name, - asset_id=asset_id, - fields=["_id"] + product_name, + folder_id, + fields={"id"} ) - if not version: + if not version_entity: log.info("Didn't find last version for product name {}".format( product_name )) continue + version_id = version_entity["id"] - relationships = lib.get_look_relationships(version["_id"]) - shader_nodes, container_node = lib.load_look(version["_id"]) + relationships = lib.get_look_relationships(version_id) + shader_nodes, container_node = lib.load_look(version_id) namespace = shader_nodes[0].split(":")[0] - # Get only the node ids and paths related to this asset + # Get only the node ids and paths related to this folder # And get the shader edits the look supplies asset_nodes_by_id = { node_id: nodes_by_id[node_id] for node_id in node_ids diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py index 4375d38316..ad43a24385 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/commands.py @@ -2,9 +2,9 @@ import os import logging from collections import defaultdict +import ayon_api import maya.cmds as cmds -from ayon_core.client 
import get_assets, get_asset_name_identifier from ayon_core.pipeline import ( remove_container, registered_host, @@ -49,8 +49,9 @@ def get_selected_nodes(): """Get information from current selection""" selection = cmds.ls(selection=True, long=True) - hierarchy = lib.get_all_children(selection) - return list(set(selection + hierarchy)) + hierarchy = lib.get_all_children(selection, + ignore_intermediate_objects=True) + return list(hierarchy.union(selection)) def get_all_asset_nodes(): @@ -62,7 +63,7 @@ def get_all_asset_nodes(): return cmds.ls(dag=True, noIntermediate=True, long=True) -def create_asset_id_hash(nodes): +def create_folder_id_hash(nodes): """Create a hash based on cbId attribute value Args: nodes (list): a list of nodes @@ -74,10 +75,10 @@ def create_asset_id_hash(nodes): for node in nodes: # iterate over content of reference node if cmds.nodeType(node) == "reference": - ref_hashes = create_asset_id_hash( + ref_hashes = create_folder_id_hash( list(set(cmds.referenceQuery(node, nodes=True, dp=True)))) - for asset_id, ref_nodes in ref_hashes.items(): - node_id_hash[asset_id] += ref_nodes + for folder_id, ref_nodes in ref_hashes.items(): + node_id_hash[folder_id] += ref_nodes elif cmds.pluginInfo('vrayformaya', query=True, loaded=True) and cmds.nodeType( node) == "VRayProxy": @@ -95,8 +96,8 @@ def create_asset_id_hash(nodes): if value is None: continue - asset_id = value.split(":")[0] - node_id_hash[asset_id].append(node) + folder_id = value.split(":")[0] + node_id_hash[folder_id].append(node) return dict(node_id_hash) @@ -104,10 +105,10 @@ def create_asset_id_hash(nodes): def create_items_from_nodes(nodes): """Create an item for the view based the container and content of it - It fetches the look document based on the asset ID found in the content. + It fetches the look document based on the folder id found in the content. The item will contain all important information for the tool to work. - If there is an asset ID which is not registered in the project's collection + If there is a folder id which is not registered in the project's collection it will log a warning message.
Args: @@ -118,54 +119,55 @@ """ - asset_view_items = [] + folder_view_items = [] - id_hashes = create_asset_id_hash(nodes) + id_hashes = create_folder_id_hash(nodes) if not id_hashes: log.warning("No id hashes") - return asset_view_items + return folder_view_items project_name = get_current_project_name() - asset_ids = set(id_hashes.keys()) - fields = {"_id", "name", "data.parents"} - asset_docs = get_assets(project_name, asset_ids, fields=fields) - asset_docs_by_id = { - str(asset_doc["_id"]): asset_doc - for asset_doc in asset_docs + folder_ids = set(id_hashes.keys()) + + folder_entities = ayon_api.get_folders( + project_name, folder_ids, fields={"id", "path"} + ) + folder_entities_by_id = { + folder_entity["id"]: folder_entity + for folder_entity in folder_entities } - for asset_id, id_nodes in id_hashes.items(): - asset_doc = asset_docs_by_id.get(asset_id) - # Skip if asset id is not found - if not asset_doc: + for folder_id, id_nodes in id_hashes.items(): + folder_entity = folder_entities_by_id.get(folder_id) + # Skip if folder id is not found + if not folder_entity: log.warning( - "Id found on {num} nodes for which no asset is found database," - " skipping '{asset_id}'".format( + "Id found on {num} nodes for which no folder is found in the database," + " skipping '{folder_id}'".format( num=len(nodes), - asset_id=asset_id + folder_id=folder_id ) ) continue - # Collect available look products for this asset - looks = lib.list_looks(project_name, asset_doc["_id"]) + # Collect available look products for this folder + looks = lib.list_looks(project_name, folder_entity["id"]) - # Collect namespaces the asset is found in + # Collect namespaces the folder is found in namespaces = set() for node in id_nodes: namespace = get_namespace_from_node(node) namespaces.add(namespace) - label = get_asset_name_identifier(asset_doc) - asset_view_items.append({ - "label": label, - "asset": asset_doc, + folder_view_items.append({ + "label": folder_entity["path"], + "folder_entity": folder_entity, "looks": looks, "namespaces": namespaces }) - return asset_view_items + return folder_view_items def remove_unused_looks():
diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/lib.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/lib.py index e3ebddb7d4..78fded12a9 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/lib.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/lib.py @@ -1,6 +1,8 @@ import json import logging +from ayon_api import get_representation_by_name + from ayon_core.pipeline import ( get_current_project_name, get_representation_path, @@ -9,7 +11,6 @@ from ayon_core.pipeline import ( loaders_from_representation, load_container ) -from ayon_core.client import get_representation_by_name from ayon_core.hosts.maya.api import lib @@ -29,7 +30,7 @@ def get_look_relationships(version_id): project_name = get_current_project_name() json_representation = get_representation_by_name( - project_name, representation_name="json", version_id=version_id + project_name, "json", version_id ) # Load relationships @@ -57,12 +58,12 @@ def load_look(version_id): project_name = get_current_project_name() # Get representations of shader file and relationships look_representation = get_representation_by_name( - project_name, representation_name="ma", version_id=version_id + project_name, "ma", version_id ) # See if representation is already loaded, if so reuse it.
host = registered_host() - representation_id = str(look_representation['_id']) + representation_id = look_representation["id"] for container in host.ls(): if (container['loader'] == "LookLoader" and container['representation'] == representation_id): diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py index 4892125954..b0807be6a6 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/models.py @@ -29,7 +29,8 @@ class AssetModel(models.TreeModel): self.beginResetModel() # Add the items sorted by label - sorter = lambda x: x["label"] + def sorter(x): + return x["label"] for item in sorted(items, key=sorter): @@ -104,12 +105,12 @@ class LookModel(models.TreeModel): # Collect the assets per look name (from the items of the AssetModel) look_products = defaultdict(list) for asset_item in items: - asset = asset_item["asset"] + folder_entity = asset_item["folder_entity"] for look in asset_item["looks"]: - look_products[look["name"]].append(asset) + look_products[look["name"]].append(folder_entity) for product_name in sorted(look_products.keys()): - assets = look_products[product_name] + folder_entities = look_products[product_name] # Define nice label without "look" prefix for readability label = ( @@ -123,10 +124,10 @@ class LookModel(models.TreeModel): item_node["product"] = product_name # Amount of matching assets for this look - item_node["match"] = len(assets) + item_node["match"] = len(folder_entities) # Store the assets that have this product available - item_node["assets"] = assets + item_node["folder_entities"] = folder_entities self.add_child(item_node) diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py index df74dcd217..88ef4b201a 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/vray_proxies.py @@ -4,8 +4,8 @@ from collections import defaultdict import logging from maya import cmds +import ayon_api -from ayon_core.client import get_last_version_by_subset_name from ayon_core.pipeline import get_current_project_name import ayon_core.hosts.maya.lib as maya_lib from . import lib @@ -51,7 +51,7 @@ def assign_vrayproxy_shaders(vrayproxy, assignments): index += 1 -def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"): +def vrayproxy_assign_look(vrayproxy, product_name="lookMain"): # type: (str, str) -> None """Assign look to vray proxy. 
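The hunk below applies to vray_proxies.py the same entity-query migration seen in arnold_standin.py above: the removed ayon_core.client helpers are replaced with ayon_api calls, and entities become plain dicts keyed by "id" rather than Mongo-style "_id". Roughly, the new lookup flow looks like this; a sketch only, with placeholder values, assuming a configured AYON server connection (lib.get_look_relationships and lib.load_look are this tool's own helpers shown earlier):

import ayon_api

project_name = "my_project"         # placeholder project name
folder_id = "0123456789abcdef0123"  # parsed from a node's cbId "<folder_id>:<hash>"

version_entity = ayon_api.get_last_version_by_product_name(
    project_name, "lookMain", folder_id, fields={"id"}
)
if version_entity:
    version_id = version_entity["id"]
    print("Latest look version:", version_id)
    # The tool then loads relationships and the shader network:
    # relationships = lib.get_look_relationships(version_id)
    # shader_nodes, container = lib.load_look(version_id)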
@@ -73,27 +73,28 @@ def vrayproxy_assign_look(vrayproxy, product_name="lookDefault"): # Group by asset id so we run over the look per asset node_ids_by_asset_id = defaultdict(set) for node_id in nodes_by_id: - asset_id = node_id.split(":", 1)[0] - node_ids_by_asset_id[asset_id].add(node_id) + folder_id = node_id.split(":", 1)[0] + node_ids_by_asset_id[folder_id].add(node_id) project_name = get_current_project_name() - for asset_id, node_ids in node_ids_by_asset_id.items(): + for folder_id, node_ids in node_ids_by_asset_id.items(): # Get latest look version - version = get_last_version_by_subset_name( + version_entity = ayon_api.get_last_version_by_product_name( project_name, - subset_name=product_name, - asset_id=asset_id, - fields=["_id"] + product_name, + folder_id, + fields={"id"} ) - if not version: + if not version_entity: print("Didn't find last version for product name {}".format( product_name )) continue + version_id = version_entity["id"] - relationships = lib.get_look_relationships(version["_id"]) - shadernodes, _ = lib.load_look(version["_id"]) + relationships = lib.get_look_relationships(version_id) + shadernodes, _ = lib.load_look(version_id) # Get only the node ids and paths related to this asset # And get the shader edits the look supplies diff --git a/client/ayon_core/hosts/maya/tools/mayalookassigner/widgets.py b/client/ayon_core/hosts/maya/tools/mayalookassigner/widgets.py index 234a1c149e..f345b87e36 100644 --- a/client/ayon_core/hosts/maya/tools/mayalookassigner/widgets.py +++ b/client/ayon_core/hosts/maya/tools/mayalookassigner/widgets.py @@ -3,7 +3,6 @@ from collections import defaultdict from qtpy import QtWidgets, QtCore -from ayon_core.client import get_asset_name_identifier from ayon_core.tools.utils.models import TreeModel from ayon_core.tools.utils.lib import ( preserve_expanded_rows, @@ -110,7 +109,7 @@ class AssetOutliner(QtWidgets.QWidget): self.add_items(items) def get_nodes(self, selection=False): - """Find the nodes in the current scene per asset.""" + """Find the nodes in the current scene per folder.""" items = self.get_selected_items() @@ -119,26 +118,27 @@ class AssetOutliner(QtWidgets.QWidget): nodes = cmds.ls(dag=True, long=True) else: nodes = commands.get_selected_nodes() - id_nodes = commands.create_asset_id_hash(nodes) + id_nodes = commands.create_folder_id_hash(nodes) - # Collect the asset item entries per asset + # Collect the asset item entries per folder # and collect the namespaces we'd like to apply - assets = {} - asset_namespaces = defaultdict(set) + folder_items = {} + namespaces_by_folder_path = defaultdict(set) for item in items: - asset_id = str(item["asset"]["_id"]) - asset_name = get_asset_name_identifier(item["asset"]) - asset_namespaces[asset_name].add(item.get("namespace")) + folder_entity = item["folder_entity"] + folder_id = folder_entity["id"] + folder_path = folder_entity["path"] + namespaces_by_folder_path[folder_path].add(item.get("namespace")) - if asset_name in assets: + if folder_path in folder_items: continue - assets[asset_name] = item - assets[asset_name]["nodes"] = id_nodes.get(asset_id, []) + folder_items[folder_path] = item + folder_items[folder_path]["nodes"] = id_nodes.get(folder_id, []) # Filter nodes to namespace (if only namespaces were selected) - for asset_name in assets: - namespaces = asset_namespaces[asset_name] + for folder_path in folder_items: + namespaces = namespaces_by_folder_path[folder_path] # When None is present there should be no filtering if None in namespaces: @@ -146,12 +146,12 @@ class 
AssetOutliner(QtWidgets.QWidget): # Else only namespaces are selected and *not* the top entry so # we should filter to only those namespaces. - nodes = assets[asset_name]["nodes"] + nodes = folder_items[folder_path]["nodes"] nodes = [node for node in nodes if commands.get_namespace_from_node(node) in namespaces] - assets[asset_name]["nodes"] = nodes + folder_items[folder_path]["nodes"] = nodes - return assets + return folder_items def select_asset_from_items(self): """Select nodes from listed asset""" diff --git a/client/ayon_core/vendor/python/common/capture.py b/client/ayon_core/hosts/maya/vendor/python/capture.py similarity index 99% rename from client/ayon_core/vendor/python/common/capture.py rename to client/ayon_core/hosts/maya/vendor/python/capture.py index 224699f916..4ccfdb35f3 100644 --- a/client/ayon_core/vendor/python/common/capture.py +++ b/client/ayon_core/hosts/maya/vendor/python/capture.py @@ -12,11 +12,7 @@ import logging from maya import cmds from maya import mel -try: - from PySide2 import QtGui, QtWidgets -except ImportError: - from PySide import QtGui - QtWidgets = QtGui +from qtpy import QtGui, QtWidgets version_info = (2, 3, 0) @@ -873,7 +869,11 @@ def _get_screen_size(): if _in_standalone(): return [0, 0] - rect = QtWidgets.QDesktopWidget().screenGeometry(-1) + try: + rect = QtWidgets.QDesktopWidget().screenGeometry(-1) + except AttributeError: + # in Qt6 it is a different call + rect = QtWidgets.QApplication.primaryScreen().availableGeometry() return [rect.width(), rect.height()] diff --git a/client/ayon_core/hosts/nuke/api/lib.py b/client/ayon_core/hosts/nuke/api/lib.py index e304b33dc7..78cbe85097 100644 --- a/client/ayon_core/hosts/nuke/api/lib.py +++ b/client/ayon_core/hosts/nuke/api/lib.py @@ -12,14 +12,7 @@ from collections import OrderedDict import nuke from qtpy import QtCore, QtWidgets - -from ayon_core.client import ( - get_project, - get_asset_by_name, - get_versions, - get_last_versions, - get_representations, -) +import ayon_api from ayon_core.host import HostDirmap from ayon_core.tools.utils import host_tools @@ -40,16 +33,15 @@ from ayon_core.settings import ( from ayon_core.addon import AddonsManager from ayon_core.pipeline.template_data import get_template_data_with_names from ayon_core.pipeline import ( - discover_legacy_creator_plugins, Anatomy, get_current_host_name, get_current_project_name, - get_current_asset_name, + get_current_folder_path, AYON_INSTANCE_ID, AVALON_INSTANCE_ID, ) from ayon_core.pipeline.context_tools import ( - get_custom_workfile_template_from_session + get_current_context_custom_workfile_template ) from ayon_core.pipeline.colorspace import get_imageio_config from ayon_core.pipeline.workfile import BuildWorkfile @@ -128,7 +120,7 @@ class Context: workfiles_tool_timer = None # Seems unused - _project_doc = None + _project_entity = None def get_main_window(): @@ -397,7 +389,13 @@ def imprint(node, data, tab=None): """ for knob in create_knobs(data, tab): - node.addKnob(knob) + # If knob name exists we set the value. Technically there could be + # multiple knobs with the same name, but the intent is not to have + # duplicated knobs so we do not account for that. 
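The knob branch that follows is an update-or-add: set the value when a knob of that name already exists, otherwise add the knob. A toy, Nuke-free sketch of the same behaviour; Knob and Node here are minimal stand-ins for the real Nuke classes, not their actual API.

class Knob:
    def __init__(self, name, value):
        self._name, self._value = name, value
    def name(self):
        return self._name
    def value(self):
        return self._value
    def setValue(self, value):
        self._value = value

class Node:
    def __init__(self):
        self._knobs = {}
    def knobs(self):
        return self._knobs
    def addKnob(self, knob):
        self._knobs[knob.name()] = knob
    def __getitem__(self, name):
        return self._knobs[name]

def imprint(node, knobs):
    # Update the value when the knob name exists, add the knob otherwise.
    for knob in knobs:
        if knob.name() in node.knobs().keys():
            node[knob.name()].setValue(knob.value())
        else:
            node.addKnob(knob)

node = Node()
imprint(node, [Knob("fps", 24)])
imprint(node, [Knob("fps", 25)])  # second call updates, no duplicate knob
print(node["fps"].value())        # 25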
+ if knob.name() in node.knobs().keys(): + node[knob.name()].setValue(knob.value()) + else: + node.addKnob(knob) @deprecated @@ -822,7 +820,7 @@ def on_script_load(): def check_inventory_versions(): """ - Actual version idetifier of Loaded containers + Actual version identifier of Loaded containers Any time this function is run it will check all nodes and filter only Loader nodes for its version. It will get all versions from database @@ -852,60 +850,62 @@ def check_inventory_versions(): project_name = get_current_project_name() # Find representations based on found containers - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_ids=repre_ids, - fields=["_id", "parent"] + fields={"id", "versionId"} ) # Store representations by id and collect version ids - repre_docs_by_id = {} + repre_entities_by_id = {} version_ids = set() - for repre_doc in repre_docs: + for repre_entity in repre_entities: # Use stringed representation id to match value in containers - repre_id = str(repre_doc["_id"]) - repre_docs_by_id[repre_id] = repre_doc - version_ids.add(repre_doc["parent"]) + repre_id = repre_entity["id"] + repre_entities_by_id[repre_id] = repre_entity + version_ids.add(repre_entity["versionId"]) - version_docs = get_versions( - project_name, version_ids, fields=["_id", "name", "parent"] + version_entities = ayon_api.get_versions( + project_name, + version_ids=version_ids, + fields={"id", "version", "productId"}, ) # Store versions by id and collect product ids - version_docs_by_id = {} + version_entities_by_id = {} product_ids = set() - for version_doc in version_docs: - version_docs_by_id[version_doc["_id"]] = version_doc - product_ids.add(version_doc["parent"]) + for version_entity in version_entities: + version_entities_by_id[version_entity["id"]] = version_entity + product_ids.add(version_entity["productId"]) # Query last versions based on product ids - last_versions_by_product_id = get_last_versions( - project_name, subset_ids=product_ids, fields=["_id", "parent"] + last_versions_by_product_id = ayon_api.get_last_versions( + project_name, product_ids=product_ids, fields={"id", "productId"} ) # Loop through collected container nodes and their representation ids for item in node_with_repre_id: # Some python versions of nuke can't unfold tuple in for loop node, repre_id = item - repre_doc = repre_docs_by_id.get(repre_id) + repre_entity = repre_entities_by_id.get(repre_id) # Failsafe for not finding the representation. 
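check_inventory_versions above replaces per-node database hits with three bulk queries chained by ids: representations give version ids, versions give product ids, and products give their last version. A server-free sketch of that chaining, with tiny hard-coded stand-ins for the ayon_api results:

repre_entities_by_id = {
    "r1": {"id": "r1", "versionId": "v1"},
}
version_entities_by_id = {
    "v1": {"id": "v1", "version": 3, "productId": "p1"},
}
last_versions_by_product_id = {
    "p1": {"id": "v2", "productId": "p1"},
}

def is_outdated(repre_id):
    """True when the container's version is not the product's latest."""
    repre_entity = repre_entities_by_id[repre_id]
    version_entity = version_entities_by_id[repre_entity["versionId"]]
    last_version = last_versions_by_product_id[version_entity["productId"]]
    return last_version["id"] != version_entity["id"]

# An outdated container would get the red-ish tile color used above.
print(is_outdated("r1"))  # True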
- if not repre_doc: + if not repre_entity: log.warning(( "Could not find the representation on node \"{}\"" ).format(node.name())) continue - version_id = repre_doc["parent"] - version_doc = version_docs_by_id.get(version_id) - if not version_doc: + version_id = repre_entity["versionId"] + version_entity = version_entities_by_id.get(version_id) + if not version_entity: log.warning(( "Could not find the version on node \"{}\"" ).format(node.name())) continue # Get last version based on product id - product_id = version_doc["parent"] + product_id = version_entity["productId"] last_version = last_versions_by_product_id[product_id] # Check if last version is same as current version - if last_version["_id"] == version_doc["_id"]: + if last_version["id"] == version_entity["id"]: color_value = "0x4ecd25ff" else: color_value = "0xd84f20ff" @@ -927,7 +927,7 @@ def writes_version_sync(): for each in nuke.allNodes(filter="Write"): # check if the node is avalon tracked - if _NODE_TAB_NAME not in each.knobs(): + if NODE_TAB_NAME not in each.knobs(): continue avalon_knob_data = read_avalon_data(each) @@ -988,28 +988,20 @@ def format_anatomy(data): project_name = get_current_project_name() anatomy = Anatomy(project_name) - log.debug("__ anatomy.templates: {}".format(anatomy.templates)) - padding = None - if "frame_padding" in anatomy.templates.keys(): - padding = int(anatomy.templates["frame_padding"]) - elif "render" in anatomy.templates.keys(): - padding = int( - anatomy.templates["render"].get( - "frame_padding" - ) - ) + frame_padding = anatomy.templates_obj.frame_padding - version = data.get("version", None) - if not version: + version = data.get("version") + if version is None: file = script_name() data["version"] = get_version_from_path(file) - asset_name = data["folderPath"] + folder_path = data["folderPath"] task_name = data["task"] host_name = get_current_host_name() + context_data = get_template_data_with_names( - project_name, asset_name, task_name, host_name + project_name, folder_path, task_name, host_name ) data.update(context_data) data.update({ @@ -1019,7 +1011,7 @@ def format_anatomy(data): "name": data["productName"], "type": data["productType"], }, - "frame": "#" * padding, + "frame": "#" * frame_padding, }) return anatomy.format(data) @@ -1177,7 +1169,9 @@ def create_write_node( anatomy_filled = format_anatomy(data) # build file path to workfiles - fdir = str(anatomy_filled["work"]["folder"]).replace("\\", "/") + fdir = str( + anatomy_filled["work"]["default"]["directory"] + ).replace("\\", "/") data["work"] = fdir fpath = StringTemplate(data["fpath_template"]).format_strict(data) @@ -1453,17 +1447,19 @@ class WorkfileSettings(object): """ def __init__(self, root_node=None, nodes=None, **kwargs): - project_doc = kwargs.get("project") - if project_doc is None: + project_entity = kwargs.get("project") + if project_entity is None: project_name = get_current_project_name() - project_doc = get_project(project_name) + project_entity = ayon_api.get_project(project_name) else: - project_name = project_doc["name"] + project_name = project_entity["name"] - Context._project_doc = project_doc + Context._project_entity = project_entity self._project_name = project_name - self._asset = get_current_asset_name() - self._asset_entity = get_asset_by_name(project_name, self._asset) + self._folder_path = get_current_folder_path() + self._folder_entity = ayon_api.get_folder_by_path( + project_name, self._folder_path + ) self._root_node = root_node or nuke.root() self._nodes = self.get_nodes(nodes=nodes) 
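The padding lookup in format_anatomy collapses to one attribute access, and the '#' token it builds is what later gets swapped for a zero-padded frame number. A tiny sketch, with frame_padding=4 as an assumed value (the real value comes from anatomy.templates_obj.frame_padding):

frame_padding = 4                  # assumed; read from anatomy in production
frame_token = "#" * frame_padding  # "####", stored under data["frame"]
frame = 37
print(frame_token, "->", str(frame).zfill(frame_padding))  # #### -> 0037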
@@ -1957,39 +1953,43 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. self.set_reads_colorspace(read_clrs_inputs) def reset_frame_range_handles(self): - """Set frame range to current asset""" + """Set frame range to current folder.""" - if "data" not in self._asset_entity: - msg = "Asset {} don't have set any 'data'".format(self._asset) + if "attrib" not in self._folder_entity: + msg = "Folder {} does not have 'attrib' set".format( + self._folder_path + ) log.warning(msg) nuke.message(msg) return - asset_data = self._asset_entity["data"] + folder_attributes = self._folder_entity["attrib"] missing_cols = [] check_cols = ["fps", "frameStart", "frameEnd", "handleStart", "handleEnd"] for col in check_cols: - if col not in asset_data: + if col not in folder_attributes: missing_cols.append(col) if len(missing_cols) > 0: missing = ", ".join(missing_cols) - msg = "'{}' are not set for asset '{}'!".format( - missing, self._asset) + msg = "'{}' are not set for folder '{}'!".format( + missing, self._folder_path) log.warning(msg) nuke.message(msg) return # get handles values - handle_start = asset_data["handleStart"] - handle_end = asset_data["handleEnd"] + handle_start = folder_attributes["handleStart"] + handle_end = folder_attributes["handleEnd"] + frame_start = folder_attributes["frameStart"] + frame_end = folder_attributes["frameEnd"] - fps = float(asset_data["fps"]) - frame_start_handle = int(asset_data["frameStart"]) - handle_start - frame_end_handle = int(asset_data["frameEnd"]) + handle_end + fps = float(folder_attributes["fps"]) + frame_start_handle = frame_start - handle_start + frame_end_handle = frame_end + handle_end self._root_node["lock_range"].setValue(False) self._root_node["fps"].setValue(fps) @@ -2000,10 +2000,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. # update node graph so knobs are updated update_node_graph() - frame_range = '{0}-{1}'.format( - int(asset_data["frameStart"]), - int(asset_data["frameEnd"]) - ) + frame_range = '{0}-{1}'.format(frame_start, frame_end) for node in nuke.allNodes(filter="Viewer"): node['frame_range'].setValue(frame_range) @@ -2030,18 +2027,12 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. """Set resolution to project resolution.""" log.info("Resetting resolution") project_name = get_current_project_name() - asset_data = self._asset_entity["data"] + folder_attributes = self._folder_entity["attrib"] format_data = { - "width": int(asset_data.get( - 'resolutionWidth', - asset_data.get('resolution_width'))), - "height": int(asset_data.get( - 'resolutionHeight', - asset_data.get('resolution_height'))), - "pixel_aspect": asset_data.get( - 'pixelAspect', - asset_data.get('pixel_aspect', 1)), + "width": folder_attributes["resolutionWidth"], + "height": folder_attributes["resolutionHeight"], + "pixel_aspect": folder_attributes["pixelAspect"], "name": project_name } @@ -2108,7 +2099,11 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies. from .utils import set_context_favorites work_dir = os.getenv("AYON_WORKDIR") - asset = get_current_asset_name() + # TODO validate functionality + # - expects the structure to be '{root}/{project}/{folder}' + # - this previously used the asset name, expecting it to be unique in the project + folder_path = get_current_folder_path() + folder_name = folder_path.split("/")[-1] favorite_items = OrderedDict() # project
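A quick numeric check of the handle arithmetic in reset_frame_range_handles above, with assumed attribute values: the root node gets the handle-extended range while Viewer nodes keep the plain frame range.

folder_attributes = {
    "fps": 25.0,
    "frameStart": 1001,
    "frameEnd": 1100,
    "handleStart": 10,
    "handleEnd": 10,
}
frame_start = folder_attributes["frameStart"]
frame_end = folder_attributes["frameEnd"]
frame_start_handle = frame_start - folder_attributes["handleStart"]
frame_end_handle = frame_end + folder_attributes["handleEnd"]
print(frame_start_handle, frame_end_handle)      # 991 1110 (root range)
print("{0}-{1}".format(frame_start, frame_end))  # 1001-1100 (viewer range)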
# add to favorites favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) - # asset - asset_root = os.path.normpath(work_dir.split( - asset)[0]) - # add asset name - asset_dir = os.path.join(asset_root, asset) + "/" + # folder + folder_root = os.path.normpath(work_dir.split( + folder_name)[0]) + # add folder name + folder_dir = os.path.join(folder_root, folder_name) + "/" # add to favorites - favorite_items.update({"Shot dir": asset_dir.replace("\\", "/")}) + favorite_items.update({"Shot dir": folder_dir.replace("\\", "/")}) # workdir favorite_items.update({"Work dir": work_dir.replace("\\", "/")}) @@ -2392,7 +2387,7 @@ def launch_workfiles_app(): Context.workfiles_launched = True - # get all imortant settings + # get all important settings open_at_start = env_value_to_bool( env_key="AYON_WORKFILE_TOOL_ON_START", default=None) @@ -2463,7 +2458,7 @@ def process_workfile_builder(): # generate first version in file not existing and feature is enabled if create_fv_on and not os.path.exists(last_workfile_path): # get custom template path if any - custom_template_path = get_custom_workfile_template_from_session( + custom_template_path = get_current_context_custom_workfile_template( project_settings=project_settings ) @@ -2632,11 +2627,11 @@ class NukeDirmap(HostDirmap): class DirmapCache: - """Caching class to get settings and sync_module easily and only once.""" + """Caching class to get settings and sitesync easily and only once.""" _project_name = None _project_settings = None - _sync_module_discovered = False - _sync_module = None + _sitesync_addon_discovered = False + _sitesync_addon = None _mapping = None @classmethod @@ -2652,11 +2647,11 @@ class DirmapCache: return cls._project_settings @classmethod - def sync_module(cls): - if not cls._sync_module_discovered: - cls._sync_module_discovered = True - cls._sync_module = AddonsManager().get("sync_server") - return cls._sync_module + def sitesync_addon(cls): + if not cls._sitesync_addon_discovered: + cls._sitesync_addon_discovered = True + cls._sitesync_addon = AddonsManager().get("sitesync") + return cls._sitesync_addon @classmethod def mapping(cls): @@ -2678,7 +2673,7 @@ def dirmap_file_name_filter(file_name): "nuke", DirmapCache.project_name(), DirmapCache.project_settings(), - DirmapCache.sync_module(), + DirmapCache.sitesync_addon(), ) if not DirmapCache.mapping(): DirmapCache.set_mapping(dirmap_processor.get_mappings()) diff --git a/client/ayon_core/hosts/nuke/api/pipeline.py b/client/ayon_core/hosts/nuke/api/pipeline.py index 582df952d3..0d44aba2f9 100644 --- a/client/ayon_core/hosts/nuke/api/pipeline.py +++ b/client/ayon_core/hosts/nuke/api/pipeline.py @@ -21,7 +21,7 @@ from ayon_core.pipeline import ( AYON_INSTANCE_ID, AVALON_INSTANCE_ID, AVALON_CONTAINER_ID, - get_current_asset_name, + get_current_folder_path, get_current_task_name, registered_host, ) @@ -30,13 +30,11 @@ from ayon_core.tools.utils import host_tools from ayon_core.hosts.nuke import NUKE_ROOT_DIR from ayon_core.tools.workfile_template_build import open_template_ui -from .command import viewer_update_and_undo_stop from .lib import ( Context, ROOT_DATA_KNOB, INSTANCE_DATA_KNOB, get_main_window, - add_publish_knob, WorkfileSettings, # TODO: remove this once workfile builder will be removed process_workfile_builder, @@ -128,7 +126,7 @@ class NukeHost( register_creator_plugin_path(CREATE_PATH) register_inventory_action_path(INVENTORY_PATH) - # Register Avalon event for workfiles loading. + # Register AYON event for workfiles loading. 
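DirmapCache above memoizes the sitesync addon lookup in class attributes, using a separate boolean so that an empty result is cached too. A generic sketch of that discover-once pattern; discover_addon is a hypothetical stand-in for AddonsManager().get("sitesync").

def discover_addon():
    # Hypothetical stand-in for the expensive addon discovery.
    return None

class AddonCache:
    _addon_discovered = False
    _addon = None

    @classmethod
    def addon(cls):
        # The flag distinguishes "never looked" from "found nothing",
        # so discovery runs at most once even when it returns None.
        if not cls._addon_discovered:
            cls._addon_discovered = True
            cls._addon = discover_addon()
        return cls._addon

print(AddonCache.addon(), AddonCache.addon())  # None None, discovered once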
register_event_callback("workio.open_file", check_inventory_versions) register_event_callback("taskChanged", change_context_label) @@ -224,15 +222,15 @@ def _show_workfiles(): def get_context_label(): return "{0}, {1}".format( - get_current_asset_name(), + get_current_folder_path(), get_current_task_name() ) def _install_menu(): - """Install Avalon menu into Nuke's main menu bar.""" + """Install AYON menu into Nuke's main menu bar.""" - # uninstall original avalon menu + # uninstall original AYON menu main_window = get_main_window() menubar = nuke.menu("Nuke") menu = menubar.addMenu(MENU_LABEL) @@ -432,7 +430,7 @@ def containerise(node, ("name", name), ("namespace", namespace), ("loader", str(loader)), - ("representation", context["representation"]["_id"]), + ("representation", context["representation"]["id"]), ], **data or dict() diff --git a/client/ayon_core/hosts/nuke/api/plugin.py b/client/ayon_core/hosts/nuke/api/plugin.py index b36dfc56e6..5b97fab0c2 100644 --- a/client/ayon_core/hosts/nuke/api/plugin.py +++ b/client/ayon_core/hosts/nuke/api/plugin.py @@ -5,8 +5,7 @@ import sys import six import random import string -from collections import OrderedDict, defaultdict -from abc import abstractmethod +from collections import defaultdict from ayon_core.settings import get_current_project_settings from ayon_core.lib import ( @@ -14,7 +13,6 @@ from ayon_core.lib import ( EnumDef ) from ayon_core.pipeline import ( - LegacyCreator, LoaderPlugin, CreatorError, Creator as NewCreator, @@ -34,18 +32,13 @@ from ayon_core.lib.transcoding import ( from .lib import ( INSTANCE_DATA_KNOB, Knobby, - check_product_name_exists, maintained_selection, get_avalon_knob_data, - set_avalon_knob_data, - add_publish_knob, - get_nuke_imageio_settings, set_node_knobs_from_settings, set_node_data, get_node_data, get_view_process_node, get_viewer_config_from_string, - deprecated, get_filenames_without_hash, link_knobs ) @@ -95,7 +88,7 @@ class NukeCreator(NewCreator): any node having instance data knob. Arguments: - product_name (str): Subset name + product_name (str): Product name """ for node in nuke.allNodes(recurseGroups=True): @@ -910,8 +903,8 @@ class ExporterReviewMov(ExporterReview): self._connect_to_above_nodes( node, product_name, "Reposition node... 
`{}`" ) - # append reformated tag - add_tags.append("reformated") + # append reformatted tag + add_tags.append("reformatted") # only create colorspace baking if toggled on if bake_viewer_process: @@ -1114,7 +1107,7 @@ def convert_to_valid_instaces(): transfer_data["active"] = ( node["publish"].value()) - # add idetifier + # add identifier transfer_data["creator_identifier"] = product_type_to_identifier( product_type ) diff --git a/client/ayon_core/hosts/nuke/api/utils.py b/client/ayon_core/hosts/nuke/api/utils.py index d738ba5464..1bfc1919fa 100644 --- a/client/ayon_core/hosts/nuke/api/utils.py +++ b/client/ayon_core/hosts/nuke/api/utils.py @@ -1,4 +1,6 @@ import os +import re + import nuke from ayon_core import resources @@ -103,9 +105,8 @@ def colorspace_exists_on_node(node, colorspace_name): except ValueError: # knob is not available on input node return False - all_clrs = get_colorspace_list(colorspace_knob) - return colorspace_name in all_clrs + return colorspace_name in get_colorspace_list(colorspace_knob) def get_colorspace_list(colorspace_knob): @@ -117,19 +118,22 @@ def get_colorspace_list(colorspace_knob): Returns: list: list of strings names of profiles """ + results = [] - all_clrs = list(colorspace_knob.values()) - reduced_clrs = [] + # This pattern matches roles, which use an indentation and + # parentheses with the original colorspace. The value returned from + # the knob is the string before the indentation, so we need to + # convert the listed values to match the value returned from the + # knob, i.e. knob.value(). + pattern = r".*\t.* \(.*\)" + for colorspace in nuke.getColorspaceList(colorspace_knob): + match = re.search(pattern, colorspace) + if match: + results.append(colorspace.split("\t", 1)[0]) + else: + results.append(colorspace) - if not colorspace_knob.getFlag(nuke.STRIP_CASCADE_PREFIX): - return all_clrs - - # strip colorspace with nested path - for clrs in all_clrs: - clrs = clrs.split('/')[-1] - reduced_clrs.append(clrs) - - return reduced_clrs + return results def is_headless(): diff --git a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py index 218ba97dd5..495edd9e5f 100644 --- a/client/ayon_core/hosts/nuke/api/workfile_template_builder.py +++ b/client/ayon_core/hosts/nuke/api/workfile_template_builder.py @@ -167,7 +167,7 @@ class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): placeholder.data["nodes_init"] = nuke.allNodes() def _before_repre_load(self, placeholder, representation): - placeholder.data["last_repre_id"] = str(representation["_id"]) + placeholder.data["last_repre_id"] = representation["id"] def collect_placeholders(self): output = [] diff --git a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py index 2f6d121af5..afef3ba843 100644 --- a/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py +++ b/client/ayon_core/hosts/nuke/hooks/pre_nukeassist_setup.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PreLaunchHook +from ayon_applications import PreLaunchHook class PrelaunchNukeAssistHook(PreLaunchHook): diff --git a/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py b/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py index f113bec887..8fb8abfbbf 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py +++ b/client/ayon_core/hosts/nuke/plugins/create/convert_legacy.py @@ -1,5 +1,5 @@ from ayon_core.pipeline import
AYON_INSTANCE_ID, AVALON_INSTANCE_ID -from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin +from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin from ayon_core.hosts.nuke.api.lib import ( INSTANCE_DATA_KNOB, get_node_data, @@ -11,7 +11,7 @@ from ayon_core.hosts.nuke.api.plugin import convert_to_valid_instaces import nuke -class LegacyConverted(SubsetConvertorPlugin): +class LegacyConverted(ProductConvertorPlugin): identifier = "legacy.converter" def find_instances(self): diff --git a/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py b/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py index 0a0467787a..b9d83a2b48 100644 --- a/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py +++ b/client/ayon_core/hosts/nuke/plugins/create/workfile_creator.py @@ -1,5 +1,6 @@ +import ayon_api + import ayon_core.hosts.nuke.api as api -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( AutoCreator, CreatedInstance, @@ -27,27 +28,32 @@ class WorkfileCreator(AutoCreator): ) project_name = self.create_context.get_current_project_name() - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) instance_data.update({ - "asset": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant }) instance_data.update(self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, instance_data diff --git a/client/ayon_core/hosts/nuke/plugins/load/actions.py b/client/ayon_core/hosts/nuke/plugins/load/actions.py index de51321924..53cb03087b 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/actions.py +++ b/client/ayon_core/hosts/nuke/plugins/load/actions.py @@ -4,6 +4,7 @@ from ayon_core.lib import Logger from ayon_core.pipeline import load +from ayon_core.hosts.nuke.api import lib log = Logger.get_logger(__name__) @@ -11,12 +12,14 @@ log = Logger.get_logger(__name__) class SetFrameRangeLoader(load.LoaderPlugin): """Set frame range excluding pre- and post-handles""" - families = ["animation", - "camera", - "write", - "yeticache", - "pointcache"] - representations = ["*"] + product_types = { + "animation", + "camera", + "write", + "yeticache", + "pointcache", + } + representations = {"*"} extensions = {"*"} label = "Set frame range" @@ -25,14 +28,11 @@ class SetFrameRangeLoader(load.LoaderPlugin): color = "white" def load(self, context, name, namespace, data): + version_entity = context["version"] + version_attributes = version_entity["attrib"] - from ayon_core.hosts.nuke.api import lib - - version = context['version'] - version_data = version.get("data", {}) - - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") log.info("start: {}, end: {}".format(start, end)) if start is None or end is None: @@ -46,12 +46,14 @@ class SetFrameRangeLoader(load.LoaderPlugin): 
class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): """Set frame range including pre- and post-handles""" - families = ["animation", - "camera", - "write", - "yeticache", - "pointcache"] - representations = ["*"] + product_types = { + "animation", + "camera", + "write", + "yeticache", + "pointcache", + } + representations = {"*"} label = "Set frame range (with handles)" order = 12 @@ -59,14 +61,9 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): color = "white" def load(self, context, name, namespace, data): - - from ayon_core.hosts.nuke.api import lib - - version = context['version'] - version_data = version.get("data", {}) - - start = version_data.get("frameStart", None) - end = version_data.get("frameEnd", None) + version_attributes = context["version"]["attrib"] + start = version_attributes.get("frameStart") + end = version_attributes.get("frameEnd") if start is None or end is None: print("Skipping setting frame range because start or " @@ -74,7 +71,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): return # Include handles - start -= version_data.get("handleStart", 0) - end += version_data.get("handleEnd", 0) + start -= version_attributes.get("handleStart", 0) + end += version_attributes.get("handleEnd", 0) lib.update_frame_range(start, end) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py index ed512c86ab..7d823919dc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_backdrop.py @@ -1,13 +1,9 @@ import nuke import nukescripts +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api.lib import ( @@ -25,8 +21,8 @@ from ayon_core.hosts.nuke.api import containerise, update_container class LoadBackdropNodes(load.LoaderPlugin): """Loading Published Backdrop nodes (workfile, nukenodes)""" - families = ["workfile", "nukenodes"] - representations = ["*"] + product_types = {"workfile", "nukenodes"} + representations = {"*"} extensions = {"nk"} label = "Import Nuke Nodes" @@ -43,7 +39,7 @@ class LoadBackdropNodes(load.LoaderPlugin): Arguments: context (dict): context of version name (str): name of the version - namespace (str): asset name + namespace (str): namespace name data (dict): compulsory attribute > not used Returns: @@ -51,24 +47,23 @@ class LoadBackdropNodes(load.LoaderPlugin): """ # get main variables - version = context['version'] - version_data = version.get("data", {}) - vname = version.get("name", None) - namespace = namespace or context['asset']['name'] - colorspace = version_data.get("colorspace", None) + namespace = namespace or context["folder"]["name"] + version_entity = context["version"] + + version_attributes = version_entity["attrib"] + colorspace = version_attributes.get("colorSpace") + object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["source", "author", "fps"] - data_imprint = { - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add attributes from the version to imprint to metadata knob + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # getting file path file = 
self.filepath_from_context(context).replace("\\", "/") @@ -178,7 +173,7 @@ class LoadBackdropNodes(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -189,31 +184,30 @@ class LoadBackdropNodes(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] # get corresponding node GN = container["node"] - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_entity).replace("\\", "/") - name = container['name'] - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - namespace = container['namespace'] - colorspace = version_data.get("colorspace", None) + name = container["name"] + namespace = container["namespace"] object_name = "{}_{}".format(name, namespace) - add_keys = ["source", "author", "fps"] + version_attributes = version_entity["attrib"] + colorspace = version_attributes.get("colorSpace") data_imprint = { - "representation": str(representation["_id"]), - "version": vname, + "representation": repre_entity["id"], + "version": version_entity["version"], "colorspaceInput": colorspace, } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # adding nodes to node graph # just in case we are in group lets jump out of it @@ -233,23 +227,25 @@ class LoadBackdropNodes(load.LoaderPlugin): GN["name"].setValue(object_name) # get all versions in list - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = self.node_color else: color_value = "0xd88467ff" GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) return update_container(GN, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py index 919a3beb06..14c54c3adc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_camera_abc.py @@ -1,12 +1,8 @@ import nuke +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api import ( @@ -24,8 +20,8 @@ class AlembicCameraLoader(load.LoaderPlugin): This will load alembic camera into script. 
""" - families = ["camera"] - representations = ["*"] + product_types = {"camera"} + representations = {"*"} extensions = {"abc"} label = "Load Alembic Camera" @@ -35,27 +31,25 @@ class AlembicCameraLoader(load.LoaderPlugin): def load(self, context, name, namespace, data): # get main variables - version = context['version'] - version_data = version.get("data", {}) - vname = version.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - fps = version_data.get("fps") or nuke.root()["fps"].getValue() - namespace = namespace or context['asset']['name'] + version_entity = context["version"] + + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() + + namespace = namespace or context["folder"]["name"] object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["source", "author", "fps"] - + # add additional metadata from the version to imprint to metadata knob data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], } - - for k in add_keys: - data_imprint.update({k: version_data[k]}) + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # getting file path file = self.filepath_from_context(context).replace("\\", "/") @@ -82,7 +76,9 @@ class AlembicCameraLoader(load.LoaderPlugin): camera_node.setXYpos(xpos, ypos) # color node by correct color by actual version - self.node_version_color(version, camera_node) + self.node_version_color( + context["project"]["name"], version_entity, camera_node + ) return containerise( node=camera_node, @@ -92,7 +88,7 @@ class AlembicCameraLoader(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. 
@@ -109,32 +105,29 @@ class AlembicCameraLoader(load.LoaderPlugin): None """ # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + version_entity = context["version"] + repre_entity = context["representation"] # get main variables - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - fps = version_data.get("fps") or nuke.root()["fps"].getValue() + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["source", "author", "fps"] - data_imprint = { - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "frameStart": first, "frameEnd": last, - "version": vname + "version": version_entity["version"] } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add attributes from the version to imprint to metadata knob + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # getting file path - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_entity).replace("\\", "/") with maintained_selection(): camera_node = container["node"] @@ -169,30 +162,33 @@ class AlembicCameraLoader(load.LoaderPlugin): d.setInput(index, camera_node) # color node by correct color by actual version - self.node_version_color(version_doc, camera_node) + self.node_version_color( + context["project"]["name"], version_entity, camera_node + ) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) return update_container(camera_node, data_imprint) - def node_version_color(self, version_doc, node): + def node_version_color(self, project_name, version_entity, node): """ Coloring a node by correct color by actual version """ # get all versions in list - project_name = get_current_project_name() - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = self.node_color else: color_value = "0xd88467ff" node["tile_color"].setValue(int(color_value, 16)) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py index 31b523fbc8..df8f2ab018 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_clip.py @@ -1,16 +1,16 @@ +from copy import deepcopy + import nuke import qargparse -from pprint import pformat -from copy import deepcopy +import ayon_api + from ayon_core.lib import Logger -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline 
import ( - get_current_project_name, get_representation_path, ) +from ayon_core.pipeline.colorspace import ( + get_imageio_file_rules_colorspace_from_filepath +) from ayon_core.hosts.nuke.api.lib import ( get_imageio_input_colorspace, maintained_selection @@ -35,14 +35,14 @@ class LoadClip(plugin.NukeLoader): """ log = Logger.get_logger(__name__) - families = [ + product_types = { "source", "plate", "render", "prerender", - "review" - ] - representations = ["*"] + "review", + } + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -85,18 +85,21 @@ class LoadClip(plugin.NukeLoader): return cls.representations_include or cls.representations def load(self, context, name, namespace, options): - """Load asset via database - """ - representation = context["representation"] + """Load asset via database.""" + project_name = context["project"]["name"] + repre_entity = context["representation"] + version_entity = context["version"] + version_attributes = version_entity["attrib"] + version_data = version_entity["data"] + # reset container id so it is always unique for each instance self.reset_container_id() - is_sequence = len(representation["files"]) > 1 + is_sequence = len(repre_entity["files"]) > 1 if is_sequence: - context["representation"] = \ - self._representation_with_hash_in_frame( - representation + context["representation"] = ( + self._representation_with_hash_in_frame(repre_entity) ) filepath = self.filepath_from_context(context) @@ -109,20 +112,16 @@ class LoadClip(plugin.NukeLoader): add_retime = options.get( "add_retime", self.options_defaults["add_retime"]) - version = context['version'] - version_data = version.get("data", {}) - repre_id = representation["_id"] + repre_id = repre_entity["id"] - self.log.debug("_ version_data: {}\n".format( - pformat(version_data))) self.log.debug( "Representation id `{}` ".format(repre_id)) - self.handle_start = version_data.get("handleStart", 0) - self.handle_end = version_data.get("handleEnd", 0) + self.handle_start = version_attributes.get("handleStart", 0) + self.handle_end = version_attributes.get("handleEnd", 0) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") first -= self.handle_start last += self.handle_end @@ -131,16 +130,28 @@ class LoadClip(plugin.NukeLoader): first = 1 last = first + duration - # Fallback to asset name when namespace is None + # If a slate is present, the frame range is 1 frame longer for movies, + # but for file sequences it is the first frame that is 1 frame lower. + slate_frames = repre_entity["data"].get("slateFrames", 0) + extension = "."
+ repre_entity["context"]["ext"] + + if extension in VIDEO_EXTENSIONS: + last += slate_frames + + files_count = len(repre_entity["files"]) + if extension in IMAGE_EXTENSIONS and files_count != 1: + first -= slate_frames + + # Fallback to folder name when namespace is None if namespace is None: - namespace = context['asset']['name'] + namespace = context["folder"]["name"] if not filepath: self.log.warning( "Representation id `{}` is failing to load".format(repre_id)) return - read_name = self._get_node_name(representation) + read_name = self._get_node_name(context) # Create the Loader with the filename path set read_node = nuke.createNode( @@ -149,47 +160,54 @@ class LoadClip(plugin.NukeLoader): inpanel=False ) + # get colorspace + colorspace = ( + repre_entity["data"].get("colorspace") + or version_attributes.get("colorSpace") + ) + # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): read_node["file"].setValue(filepath) - used_colorspace = self._set_colorspace( - read_node, version_data, representation["data"], filepath) + self.set_colorspace_to_node( + read_node, + filepath, + project_name, + version_entity, + repre_entity + ) - self._set_range_to_node(read_node, first, last, start_at_workfile) + self._set_range_to_node( + read_node, first, last, start_at_workfile, slate_frames + ) - # add additional metadata from the version to imprint Avalon knob - add_keys = ["frameStart", "frameEnd", - "source", "colorspace", "author", "fps", "version", - "handleStart", "handleEnd"] + version_name = version_entity["version"] + if version_name < 0: + version_name = "hero" - data_imprint = {} - for key in add_keys: - if key == 'version': - version_doc = context["version"] - if version_doc["type"] == "hero_version": - version = "hero" - else: - version = version_doc.get("name") + data_imprint = { + "version": version_name, + "db_colorspace": colorspace + } - if version: - data_imprint[key] = version + # add attributes from the version to imprint metadata knob + for key in [ + "frameStart", + "frameEnd", + "source", + "author", + "fps", + "handleStart", + "handleEnd", + ]: + value = version_attributes.get(key, str(None)) + if isinstance(value, str): + value = value.replace("\\", "/") + data_imprint[key] = value - elif key == 'colorspace': - colorspace = representation["data"].get(key) - colorspace = colorspace or version_data.get(key) - data_imprint["db_colorspace"] = colorspace - if used_colorspace: - data_imprint["used_colorspace"] = used_colorspace - else: - value_ = context["version"]['data'].get( - key, str(None)) - if isinstance(value_, (str)): - value_ = value_.replace("\\", "/") - data_imprint[key] = value_ - - if add_retime and version_data.get("retime", None): + if add_retime and version_data.get("retime"): data_imprint["addRetime"] = True read_node["tile_color"].setValue(int("0x4ecd25ff", 16)) @@ -202,27 +220,28 @@ class LoadClip(plugin.NukeLoader): loader=self.__class__.__name__, data=data_imprint) - if add_retime and version_data.get("retime", None): + if add_retime and version_data.get("retime"): self._make_retimes(read_node, version_data) self.set_as_member(read_node) return container - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def _representation_with_hash_in_frame(self, representation): + def _representation_with_hash_in_frame(self, repre_entity): """Convert frame key value to padded hash Args: - 
representation (dict): representation data + repre_entity (dict): Representation entity. Returns: dict: altered representation data + """ - representation = deepcopy(representation) - context = representation["context"] + new_repre_entity = deepcopy(repre_entity) + context = new_repre_entity["context"] # Get the frame from the context and hash it frame = context["frame"] @@ -230,7 +249,7 @@ class LoadClip(plugin.NukeLoader): # Replace the frame with the hash in the originalBasename if ( - "{originalBasename}" in representation["data"]["template"] + "{originalBasename}" in new_repre_entity["attrib"]["template"] ): origin_basename = context["originalBasename"] context["originalBasename"] = origin_basename.replace( @@ -238,10 +257,10 @@ class LoadClip(plugin.NukeLoader): ) # Replace the frame with the hash in the frame - representation["context"]["frame"] = hashed_frame - return representation + new_repre_entity["context"]["frame"] = hashed_frame + return new_repre_entity - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -250,16 +269,25 @@ class LoadClip(plugin.NukeLoader): """ - is_sequence = len(representation["files"]) > 1 + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] + + version_attributes = version_entity["attrib"] + version_data = version_entity["data"] + + is_sequence = len(repre_entity["files"]) > 1 read_node = container["node"] if is_sequence: - representation = self._representation_with_hash_in_frame( - representation + repre_entity = self._representation_with_hash_in_frame( + repre_entity ) - filepath = get_representation_path(representation).replace("\\", "/") + filepath = ( + get_representation_path(repre_entity) + ).replace("\\", "/") self.log.debug("_ filepath: {}".format(filepath)) start_at_workfile = "start at" in read_node['frame_mode'].value() @@ -269,21 +297,19 @@ class LoadClip(plugin.NukeLoader): if "addRetime" in key ] - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - - version_data = version_doc.get("data", {}) - repre_id = representation["_id"] + repre_id = repre_entity["id"] # colorspace profile - colorspace = representation["data"].get("colorspace") - colorspace = colorspace or version_data.get("colorspace") + colorspace = ( + repre_entity["data"].get("colorspace") + or version_attributes.get("colorSpace") + ) - self.handle_start = version_data.get("handleStart", 0) - self.handle_end = version_data.get("handleEnd", 0) + self.handle_start = version_attributes.get("handleStart", 0) + self.handle_end = version_attributes.get("handleEnd", 0) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") first -= self.handle_start last += self.handle_end @@ -302,54 +328,84 @@ class LoadClip(plugin.NukeLoader): # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): - used_colorspace = self._set_colorspace( - read_node, version_data, representation["data"], filepath) + self.set_colorspace_to_node( + read_node, + filepath, + project_name, + version_entity, + repre_entity + ) self._set_range_to_node(read_node, first, last, start_at_workfile) updated_dict = { - "representation": str(representation["_id"]), + 
"representation": repre_entity["id"], "frameStart": str(first), "frameEnd": str(last), - "version": str(version_doc.get("name")), + "version": str(version_entity["version"]), "db_colorspace": colorspace, - "source": version_data.get("source"), + "source": version_attributes.get("source"), "handleStart": str(self.handle_start), "handleEnd": str(self.handle_end), - "fps": str(version_data.get("fps")), - "author": version_data.get("author") + "fps": str(version_attributes.get("fps")), + "author": version_attributes.get("author") } - # add used colorspace if found any - if used_colorspace: - updated_dict["used_colorspace"] = used_colorspace - - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of read_node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = "0x4ecd25ff" else: color_value = "0xd84f20ff" read_node["tile_color"].setValue(int(color_value, 16)) # Update the imprinted representation - update_container( - read_node, - updated_dict - ) + update_container(read_node, updated_dict) self.log.info( - "updated to version: {}".format(version_doc.get("name")) + "updated to version: {}".format(version_entity["version"]) ) - if add_retime and version_data.get("retime", None): + if add_retime and version_data.get("retime"): self._make_retimes(read_node, version_data) else: self.clear_members(read_node) self.set_as_member(read_node) + def set_colorspace_to_node( + self, + read_node, + filepath, + project_name, + version_entity, + repre_entity, + ): + """Set colorspace to read node. + + Sets colorspace with available names validation. + + Args: + read_node (nuke.Node): The nuke's read node + filepath (str): File path. + project_name (str): Project name. + version_entity (dict): Version entity. + repre_entity (dict): Representation entity. 
+ + """ + used_colorspace = self._get_colorspace_data( + project_name, version_entity, repre_entity, filepath + ) + if ( + used_colorspace + and colorspace_exists_on_node(read_node, used_colorspace) + ): + self.log.info(f"Used colorspace: {used_colorspace}") + read_node["colorspace"].setValue(used_colorspace) + else: + self.log.info("Colorspace not set...") + def remove(self, container): read_node = container["node"] assert read_node.Class() == "Read", "Must be Read" @@ -360,14 +416,21 @@ class LoadClip(plugin.NukeLoader): for member in members: nuke.delete(member) - def _set_range_to_node(self, read_node, first, last, start_at_workfile): + def _set_range_to_node( + self, read_node, first, last, start_at_workfile, slate_frames=0 + ): read_node['origfirst'].setValue(int(first)) read_node['first'].setValue(int(first)) read_node['origlast'].setValue(int(last)) read_node['last'].setValue(int(last)) # set start frame depending on workfile or version - self._loader_shift(read_node, start_at_workfile) + if start_at_workfile: + read_node['frame_mode'].setValue("start at") + + start_frame = self.script_start - slate_frames + + read_node['frame'].setValue(str(start_frame)) def _make_retimes(self, parent_node, version_data): ''' Create all retime and timewarping nodes with copied animation ''' @@ -424,51 +487,78 @@ class LoadClip(plugin.NukeLoader): for i, n in enumerate(dependent_nodes): last_node.setInput(i, n) - def _loader_shift(self, read_node, workfile_start=False): - """ Set start frame of read node to a workfile start + def _get_node_name(self, context): + folder_entity = context["folder"] + product_name = context["product"]["name"] + repre_entity = context["representation"] - Args: - read_node (nuke.Node): The nuke's read node - workfile_start (bool): set workfile start frame if true - - """ - if workfile_start: - read_node['frame_mode'].setValue("start at") - read_node['frame'].setValue(str(self.script_start)) - - def _get_node_name(self, representation): - - repre_cont = representation["context"] + folder_name = folder_entity["name"] + repre_cont = repre_entity["context"] name_data = { - "asset": repre_cont["asset"], - "subset": repre_cont["subset"], - "representation": representation["name"], + "folder": { + "name": folder_name, + }, + "product": { + "name": product_name, + }, + "asset": folder_name, + "subset": product_name, + "representation": repre_entity["name"], "ext": repre_cont["representation"], - "id": representation["_id"], + "id": repre_entity["id"], "class_name": self.__class__.__name__ } return self.node_name_template.format(**name_data) - def _set_colorspace(self, node, version_data, repre_data, path): - output_color = None - path = path.replace("\\", "/") - # get colorspace - colorspace = repre_data.get("colorspace") - colorspace = colorspace or version_data.get("colorspace") + def _get_colorspace_data( + self, project_name, version_entity, repre_entity, filepath + ): + """Get colorspace data from version and representation documents - # colorspace from `project_settings/nuke/imageio/regex_inputs` - iio_colorspace = get_imageio_input_colorspace(path) + Args: + project_name (str): Project name. + version_entity (dict): Version entity. + repre_entity (dict): Representation entity. + filepath (str): File path. 
-        # Set colorspace defined in version data
-        if (
-            colorspace is not None
-            and colorspace_exists_on_node(node, str(colorspace))
-        ):
-            node["colorspace"].setValue(str(colorspace))
-            output_color = str(colorspace)
-        elif iio_colorspace is not None:
-            node["colorspace"].setValue(iio_colorspace)
-            output_color = iio_colorspace
+        Returns:
+            Optional[str]: Colorspace name, or None if none was found.
+        """
+        # Get backward compatible colorspace key.
+        colorspace = repre_entity["data"].get("colorspace")
+        self.log.debug(
+            f"Colorspace from representation colorspace: {colorspace}"
+        )

-        return output_color
+        # Get backward compatible version data key if colorspace is not found.
+        if not colorspace:
+            colorspace = version_entity["attrib"].get("colorSpace")
+            self.log.debug(
+                f"Colorspace from version colorspace: {colorspace}"
+            )
+
+        # Get colorspace from representation colorspaceData if colorspace is
+        # not found.
+        if not colorspace:
+            colorspace_data = repre_entity["data"].get("colorspaceData", {})
+            colorspace = colorspace_data.get("colorspace")
+            self.log.debug(
+                f"Colorspace from representation colorspaceData: {colorspace}"
+            )
+
+        # check if any file rules are applicable to the file path
+        new_parsed_colorspace = get_imageio_file_rules_colorspace_from_filepath(  # noqa
+            filepath, "nuke", project_name
+        )
+        self.log.debug(f"Colorspace new filerules: {new_parsed_colorspace}")
+
+        # colorspace from `project_settings/nuke/imageio/regexInputs`
+        old_parsed_colorspace = get_imageio_input_colorspace(filepath)
+        self.log.debug(f"Colorspace old filerules: {old_parsed_colorspace}")
+
+        return (
+            new_parsed_colorspace
+            or old_parsed_colorspace
+            or colorspace
+        )
diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py
index 0b5f31033e..a87c81295a 100644
--- a/client/ayon_core/hosts/nuke/plugins/load/load_effects.py
+++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects.py
@@ -2,14 +2,10 @@ import json
 from collections import OrderedDict
 import nuke
 import six
+import ayon_api

-from ayon_core.client import (
-    get_version_by_id,
-    get_last_version_by_subset_id,
-)
 from ayon_core.pipeline import (
     load,
-    get_current_project_name,
     get_representation_path,
 )
 from ayon_core.hosts.nuke.api import (
@@ -22,8 +18,8 @@ from ayon_core.hosts.nuke.api import (
 class LoadEffects(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    families = ["effect"]
-    representations = ["*"]
+    product_types = {"effect"}
+    representations = {"*"}
     extensions = {"json"}

     label = "Load Effects - nodes"
@@ -40,37 +36,43 @@ class LoadEffects(load.LoaderPlugin):
         Arguments:
             context (dict): context of version
             name (str): name of the version
-            namespace (str): asset name
+            namespace (str): namespace name
             data (dict): compulsory attribute > not used

         Returns:
             nuke node: containerised nuke node object
         """
         # get main variables
-        version = context['version']
-        version_data = version.get("data", {})
-        vname = version.get("name", None)
-        first = version_data.get("frameStart", None)
-        last = version_data.get("frameEnd", None)
+        version_entity = context["version"]
+
+        version_attributes = version_entity["attrib"]
+        first = version_attributes.get("frameStart")
+        last = version_attributes.get("frameEnd")
+        colorspace = version_attributes.get("colorSpace")
+
         workfile_first_frame = int(nuke.root()["first_frame"].getValue())
-        namespace = namespace or context['asset']['name']
-        colorspace = version_data.get("colorspace", None)
+        namespace = namespace or
context["folder"]["name"] object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] - data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace, } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add additional metadata from the version to imprint to Avalon knob + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = version_attributes[k] # getting file path file = self.filepath_from_context(context).replace("\\", "/") @@ -146,7 +148,7 @@ class LoadEffects(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -156,36 +158,41 @@ class LoadEffects(load.LoaderPlugin): """ # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] # get corresponding node GN = container["node"] - file = get_representation_path(representation).replace("\\", "/") - name = container['name'] - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = container['namespace'] - colorspace = version_data.get("colorspace", None) - object_name = "{}_{}".format(name, namespace) + file = get_representation_path(repre_entity).replace("\\", "/") - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + colorspace = version_attributes.get("colorSpace") + + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) + namespace = container["namespace"] data_imprint = { - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps", + ]: + data_imprint[k] = version_attributes[k] # Update the imprinted representation update_container( @@ -251,32 +258,34 @@ class LoadEffects(load.LoaderPlugin): # try to find parent read node self.connect_read_node(GN, namespace, json_f["assignTo"]) - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = "0x3469ffff" else: color_value = "0xd84f20ff" GN["tile_color"].setValue(int(color_value, 16)) - 
self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) - def connect_read_node(self, group_node, asset, subset): + def connect_read_node(self, group_node, namespace, product_name): """ Finds read node and selects it Arguments: - asset (str): asset name + namespace (str): namespace name Returns: nuke node: node is selected None: if nothing found """ - search_name = "{0}_{1}".format(asset, subset) + search_name = "{0}_{1}".format(namespace, product_name) node = [ n for n in nuke.allNodes(filter="Read") @@ -344,8 +353,8 @@ class LoadEffects(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py index 4d8a8518f2..8fa1347598 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_effects_ip.py @@ -2,14 +2,10 @@ import json from collections import OrderedDict import six import nuke +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api import lib @@ -23,8 +19,8 @@ from ayon_core.hosts.nuke.api import ( class LoadEffectsInputProcess(load.LoaderPlugin): """Loading colorspace soft effect exported from nukestudio""" - families = ["effect"] - representations = ["*"] + product_types = {"effect"} + representations = {"*"} extensions = {"json"} label = "Load Effects - Input Process" @@ -40,7 +36,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin): Arguments: context (dict): context of version name (str): name of the version - namespace (str): asset name + namespace (str): namespace name data (dict): compulsory attribute > not used Returns: @@ -48,30 +44,35 @@ class LoadEffectsInputProcess(load.LoaderPlugin): """ # get main variables - version = context['version'] - version_data = version.get("data", {}) - vname = version.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) + version_entity = context["version"] + + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + colorspace = version_attributes.get("colorSpace") + workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = namespace or context['asset']['name'] - colorspace = version_data.get("colorspace", None) + namespace = namespace or context["folder"]["name"] object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] - data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace, } - - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add additional metadata from the version to imprint to Avalon knob + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = 
version_attributes[k]

         # getting file path
         file = self.filepath_from_context(context).replace("\\", "/")
@@ -150,7 +151,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
             loader=self.__class__.__name__,
             data=data_imprint)

-    def update(self, container, representation):
+    def update(self, container, context):
         """Update the Loader's path

         Nuke automatically tries to reset some variables when changing
@@ -161,33 +162,40 @@ class LoadEffectsInputProcess(load.LoaderPlugin):

         # get main variables
         # Get version from io
-        project_name = get_current_project_name()
-        version_doc = get_version_by_id(project_name, representation["parent"])
+        project_name = context["project"]["name"]
+        version_entity = context["version"]
+        repre_entity = context["representation"]

         # get corresponding node
         GN = container["node"]

-        file = get_representation_path(representation).replace("\\", "/")
-        version_data = version_doc.get("data", {})
-        vname = version_doc.get("name", None)
-        first = version_data.get("frameStart", None)
-        last = version_data.get("frameEnd", None)
-        workfile_first_frame = int(nuke.root()["first_frame"].getValue())
-        colorspace = version_data.get("colorspace", None)
+        file = get_representation_path(repre_entity).replace("\\", "/")

-        add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
-                    "source", "author", "fps"]
+        version_attributes = version_entity["attrib"]
+        first = version_attributes.get("frameStart")
+        last = version_attributes.get("frameEnd")
+        colorspace = version_attributes.get("colorSpace")
+
+        workfile_first_frame = int(nuke.root()["first_frame"].getValue())

         data_imprint = {
-            "representation": str(representation["_id"]),
+            "representation": repre_entity["id"],
             "frameStart": first,
             "frameEnd": last,
-            "version": vname,
+            "version": version_entity["version"],
             "colorspaceInput": colorspace,
         }

-        for k in add_keys:
-            data_imprint.update({k: version_data[k]})
+        for k in [
+            "frameStart",
+            "frameEnd",
+            "handleStart",
+            "handleEnd",
+            "source",
+            "author",
+            "fps"
+        ]:
+            data_imprint[k] = version_attributes[k]

         # Update the imprinted representation
         update_container(
@@ -251,18 +259,18 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
             output.setInput(0, pre_node)

         # get all versions in list
-        last_version_doc = get_last_version_by_subset_id(
-            project_name, version_doc["parent"], fields=["_id"]
+        last_version_entity = ayon_api.get_last_version_by_product_id(
+            project_name, version_entity["productId"], fields={"id"}
         )
         # change color of node
-        if version_doc["_id"] == last_version_doc["_id"]:
+        if version_entity["id"] == last_version_entity["id"]:
             color_value = "0x3469ffff"
         else:
             color_value = "0xd84f20ff"
         GN["tile_color"].setValue(int(color_value, 16))

-        self.log.info("updated to version: {}".format(version_doc.get("name")))
+        self.log.info("updated to version: {}".format(version_entity["version"]))

     def connect_active_viewer(self, group_node):
         """
@@ -355,8 +363,8 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
         else:
             return input

-    def switch(self, container, representation):
-        self.update(container, representation)
+    def switch(self, container, context):
+        self.update(container, context)

     def remove(self, container):
         node = container["node"]
diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py
index 54daa74405..95f85bacfc 100644
--- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py
+++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo.py
@@ -1,12 +1,8 @@
 import nuke
+import ayon_api

-from
ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api.lib import ( @@ -25,8 +21,8 @@ from ayon_core.hosts.nuke.api import ( class LoadGizmo(load.LoaderPlugin): """Loading nuke Gizmo""" - families = ["gizmo"] - representations = ["*"] + product_types = {"gizmo"} + representations = {"*"} extensions = {"nk"} label = "Load Gizmo" @@ -42,7 +38,7 @@ class LoadGizmo(load.LoaderPlugin): Arguments: context (dict): context of version name (str): name of the version - namespace (str): asset name + namespace (str): namespace name data (dict): compulsory attribute > not used Returns: @@ -50,29 +46,35 @@ class LoadGizmo(load.LoaderPlugin): """ # get main variables - version = context['version'] - version_data = version.get("data", {}) - vname = version.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - namespace = namespace or context['asset']['name'] - colorspace = version_data.get("colorspace", None) + version_entity = context["version"] + version_attributes = version_entity["attrib"] + + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + colorspace = version_attributes.get("colorSpace") + + namespace = namespace or context["folder"]["name"] object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] - data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add attributes from the version to imprint to metadata knob + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = version_attributes[k] # getting file path file = self.filepath_from_context(context).replace("\\", "/") @@ -97,7 +99,7 @@ class LoadGizmo(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -108,35 +110,39 @@ class LoadGizmo(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] + + version_attributes = version_entity["attrib"] # get corresponding node group_node = container["node"] - file = get_representation_path(representation).replace("\\", "/") - name = container['name'] - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - namespace = container['namespace'] - colorspace = version_data.get("colorspace", None) - object_name = "{}_{}".format(name, namespace) + file = get_representation_path(repre_entity).replace("\\", "/") - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] + first = version_attributes.get("frameStart") + last = 
version_attributes.get("frameEnd")
+        colorspace = version_attributes.get("colorSpace")

         data_imprint = {
-            "representation": str(representation["_id"]),
+            "representation": repre_entity["id"],
             "frameStart": first,
             "frameEnd": last,
-            "version": vname,
+            "version": version_entity["version"],
             "colorspaceInput": colorspace
         }

-        for k in add_keys:
-            data_imprint.update({k: version_data[k]})
+        for k in [
+            "frameStart",
+            "frameEnd",
+            "handleStart",
+            "handleEnd",
+            "source",
+            "author",
+            "fps"
+        ]:
+            data_imprint[k] = version_attributes[k]

         # capture pipeline metadata
         avalon_data = get_avalon_knob_data(group_node)
@@ -157,24 +163,26 @@ class LoadGizmo(load.LoaderPlugin):
         # set updated pipeline metadata
         set_avalon_knob_data(new_group_node, avalon_data)

-        last_version_doc = get_last_version_by_subset_id(
-            project_name, version_doc["parent"], fields=["_id"]
+        last_version_entity = ayon_api.get_last_version_by_product_id(
+            project_name, version_entity["productId"], fields={"id"}
         )
         # change color of node
-        if version_doc["_id"] == last_version_doc["_id"]:
+        if version_entity["id"] == last_version_entity["id"]:
             color_value = self.node_color
         else:
             color_value = "0xd88467ff"
         new_group_node["tile_color"].setValue(int(color_value, 16))

-        self.log.info("updated to version: {}".format(version_doc.get("name")))
+        self.log.info(
+            "updated to version: {}".format(version_entity["version"])
+        )

         return update_container(new_group_node, data_imprint)

-    def switch(self, container, representation):
-        self.update(container, representation)
+    def switch(self, container, context):
+        self.update(container, context)

     def remove(self, container):
         node = container["node"]
diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py
index 677d9868f1..3112e27811 100644
--- a/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py
+++ b/client/ayon_core/hosts/nuke/plugins/load/load_gizmo_ip.py
@@ -1,13 +1,9 @@
 import nuke
 import six
+import ayon_api

-from ayon_core.client import (
-    get_version_by_id,
-    get_last_version_by_subset_id,
-)
 from ayon_core.pipeline import (
     load,
-    get_current_project_name,
     get_representation_path,
 )
 from ayon_core.hosts.nuke.api.lib import (
@@ -27,8 +23,8 @@ from ayon_core.hosts.nuke.api import (
 class LoadGizmoInputProcess(load.LoaderPlugin):
     """Loading colorspace soft effect exported from nukestudio"""

-    families = ["gizmo"]
-    representations = ["*"]
+    product_types = {"gizmo"}
+    representations = {"*"}
     extensions = {"nk"}

     label = "Load Gizmo - Input Process"
@@ -44,7 +40,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):
         Arguments:
             context (dict): context of version
             name (str): name of the version
-            namespace (str): asset name
+            namespace (str): namespace name
             data (dict): compulsory attribute > not used

         Returns:
@@ -52,29 +48,35 @@ class LoadGizmoInputProcess(load.LoaderPlugin):
         """
         # get main variables
-        version = context['version']
-        version_data = version.get("data", {})
-        vname = version.get("name", None)
-        first = version_data.get("frameStart", None)
-        last = version_data.get("frameEnd", None)
-        namespace = namespace or context['asset']['name']
-        colorspace = version_data.get("colorspace", None)
+        version_entity = context["version"]
+
+        version_attributes = version_entity["attrib"]
+        first = version_attributes.get("frameStart")
+        last = version_attributes.get("frameEnd")
+        colorspace = version_attributes.get("colorSpace")
+
+        namespace = namespace or context["folder"]["name"]
         object_name = "{}_{}".format(name,
namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] - + # add additional metadata from the version to imprint to metadata knob data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = version_attributes[k] # getting file path file = self.filepath_from_context(context).replace("\\", "/") @@ -104,7 +106,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -115,35 +117,38 @@ class LoadGizmoInputProcess(load.LoaderPlugin): # get main variables # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] # get corresponding node group_node = container["node"] - file = get_representation_path(representation).replace("\\", "/") - name = container['name'] - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - namespace = container['namespace'] - colorspace = version_data.get("colorspace", None) - object_name = "{}_{}".format(name, namespace) + file = get_representation_path(repre_entity).replace("\\", "/") - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + colorspace = version_attributes.get("colorSpace") data_imprint = { - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "frameStart": first, "frameEnd": last, - "version": vname, + "version": version_entity["version"], "colorspaceInput": colorspace } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = version_attributes[k] # capture pipeline metadata avalon_data = get_avalon_knob_data(group_node) @@ -164,18 +169,20 @@ class LoadGizmoInputProcess(load.LoaderPlugin): # set updated pipeline metadata set_avalon_knob_data(new_group_node, avalon_data) - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = self.node_color else: color_value = "0xd88467ff" new_group_node["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) return 
update_container(new_group_node, data_imprint) @@ -254,8 +261,8 @@ class LoadGizmoInputProcess(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_image.py b/client/ayon_core/hosts/nuke/plugins/load/load_image.py index e9435ec10a..d825b621fc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_image.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_image.py @@ -1,14 +1,10 @@ import nuke import qargparse +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api.lib import ( @@ -27,16 +23,16 @@ from ayon_core.lib.transcoding import ( class LoadImage(load.LoaderPlugin): """Load still image into Nuke""" - families = [ + product_types = { "render2d", "source", "plate", "render", "prerender", "review", - "image" - ] - representations = ["*"] + "image", + } + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS ) @@ -72,40 +68,37 @@ class LoadImage(load.LoaderPlugin): "frame_number", int(nuke.root()["first_frame"].getValue()) ) - version = context['version'] - version_data = version.get("data", {}) - repr_id = context["representation"]["_id"] + version_entity = context["version"] + version_attributes = version_entity["attrib"] + repre_entity = context["representation"] + repre_id = repre_entity["id"] - self.log.info("version_data: {}\n".format(version_data)) self.log.debug( - "Representation id `{}` ".format(repr_id)) + "Representation id `{}` ".format(repre_id)) last = first = int(frame_number) - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context['asset']['name'] + namespace = context["folder"]["name"] file = self.filepath_from_context(context) if not file: - repr_id = context["representation"]["_id"] self.log.warning( - "Representation id `{}` is failing to load".format(repr_id)) + "Representation id `{}` is failing to load".format(repre_id)) return file = file.replace("\\", "/") - representation = context["representation"] - repr_cont = representation["context"] - frame = repr_cont.get("frame") + frame = repre_entity["context"].get("frame") if frame: padding = len(frame) file = file.replace( frame, format(frame_number, "0{}".format(padding))) - read_name = self._get_node_name(representation) + read_name = self._get_node_name(context) # Create the Loader with the filename path set with viewer_update_and_undo_stop(): @@ -118,7 +111,7 @@ class LoadImage(load.LoaderPlugin): r["file"].setValue(file) # Set colorspace defined in version data - colorspace = context["version"]["data"].get("colorspace") + colorspace = version_entity["attrib"].get("colorSpace") if colorspace: r["colorspace"].setValue(str(colorspace)) @@ -132,19 +125,16 @@ class LoadImage(load.LoaderPlugin): r["origlast"].setValue(last) r["last"].setValue(last) - # add additional metadata from the version to imprint Avalon knob - add_keys = ["source", "colorspace", "author", "fps", "version"] - + # add attributes from the version to imprint metadata knob + colorspace = version_attributes["colorSpace"] data_imprint = { "frameStart": first, - "frameEnd": last + "frameEnd": last, + 
"version": version_entity["version"], + "colorspace": colorspace, } - for k in add_keys: - if k == 'version': - data_imprint.update({k: context["version"]['name']}) - else: - data_imprint.update( - {k: context["version"]['data'].get(k, str(None))}) + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes.get(k, str(None)) r["tile_color"].setValue(int("0x4ecd25ff", 16)) @@ -155,10 +145,10 @@ class LoadImage(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -171,14 +161,18 @@ class LoadImage(load.LoaderPlugin): assert node.Class() == "Read", "Must be Read" - repr_cont = representation["context"] + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] - file = get_representation_path(representation) + repr_cont = repre_entity["context"] + + file = get_representation_path(repre_entity) if not file: - repr_id = representation["_id"] + repre_id = repre_entity["id"] self.log.warning( - "Representation id `{}` is failing to load".format(repr_id)) + "Representation id `{}` is failing to load".format(repre_id)) return file = file.replace("\\", "/") @@ -191,14 +185,10 @@ class LoadImage(load.LoaderPlugin): format(frame_number, "0{}".format(padding))) # Get start frame from version data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) - version_data = version_doc.get("data", {}) - last = first = int(frame_number) # Set the global in to the start frame of the sequence @@ -208,31 +198,30 @@ class LoadImage(load.LoaderPlugin): node["origlast"].setValue(last) node["last"].setValue(last) - updated_dict = {} - updated_dict.update({ - "representation": str(representation["_id"]), + version_attributes = version_entity["attrib"] + updated_dict = { + "representation": repre_entity["id"], "frameStart": str(first), "frameEnd": str(last), - "version": str(version_doc.get("name")), - "colorspace": version_data.get("colorspace"), - "source": version_data.get("source"), - "fps": str(version_data.get("fps")), - "author": version_data.get("author") - }) + "version": str(version_entity["version"]), + "colorspace": version_attributes.get("colorSpace"), + "source": version_attributes.get("source"), + "fps": str(version_attributes.get("fps")), + "author": version_attributes.get("author") + } # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = "0x4ecd25ff" else: color_value = "0xd84f20ff" node["tile_color"].setValue(int(color_value, 16)) # Update the imprinted representation - update_container( - node, - updated_dict - ) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + update_container(node, updated_dict) + self.log.info("updated to version: {}".format( + version_entity["version"] + )) def remove(self, container): node = container["node"] @@ -241,15 +230,25 @@ 
class LoadImage(load.LoaderPlugin): with viewer_update_and_undo_stop(): nuke.delete(node) - def _get_node_name(self, representation): + def _get_node_name(self, context): + folder_entity = context["folder"] + product_name = context["product"]["name"] + repre_entity = context["representation"] - repre_cont = representation["context"] + folder_name = folder_entity["name"] + repre_cont = repre_entity["context"] name_data = { - "asset": repre_cont["asset"], - "subset": repre_cont["subset"], - "representation": representation["name"], + "folder": { + "name": folder_name, + }, + "product": { + "name": product_name, + }, + "asset": folder_name, + "subset": product_name, + "representation": repre_entity["name"], "ext": repre_cont["representation"], - "id": representation["_id"], + "id": repre_entity["id"], "class_name": self.__class__.__name__ } diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py b/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py index 412181f3d9..beebd0458f 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_matchmove.py @@ -7,8 +7,8 @@ class MatchmoveLoader(load.LoaderPlugin): This will run matchmove script to create track in script. """ - families = ["matchmove"] - representations = ["*"] + product_types = {"matchmove"} + representations = {"*"} extensions = {"py"} defaults = ["Camera", "Object"] diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_model.py b/client/ayon_core/hosts/nuke/plugins/load/load_model.py index 125cb28e27..0326e0a4fc 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_model.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_model.py @@ -1,12 +1,8 @@ import nuke +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api.lib import maintained_selection @@ -22,8 +18,8 @@ class AlembicModelLoader(load.LoaderPlugin): This will load alembic model or anim into script. 
""" - families = ["model", "pointcache", "animation"] - representations = ["*"] + product_types = {"model", "pointcache", "animation"} + representations = {"*"} extensions = {"abc"} label = "Load Alembic" @@ -33,27 +29,26 @@ class AlembicModelLoader(load.LoaderPlugin): def load(self, context, name, namespace, data): # get main variables - version = context['version'] - version_data = version.get("data", {}) - vname = version.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - fps = version_data.get("fps") or nuke.root()["fps"].getValue() - namespace = namespace or context['asset']['name'] + project_name = context["project"]["name"] + version_entity = context["version"] + + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() + + namespace = namespace or context["folder"]["name"] object_name = "{}_{}".format(name, namespace) # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["source", "author", "fps"] - data_imprint = { "frameStart": first, "frameEnd": last, - "version": vname + "version": version_entity["version"] } - - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add attributes from the version to imprint to metadata knob + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # getting file path file = self.filepath_from_context(context).replace("\\", "/") @@ -86,7 +81,7 @@ class AlembicModelLoader(load.LoaderPlugin): model_node.setXYpos(xpos, ypos) # color node by correct color by actual version - self.node_version_color(version, model_node) + self.node_version_color(project_name, version_entity, model_node) return containerise( node=model_node, @@ -96,7 +91,7 @@ class AlembicModelLoader(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def update(self, container, representation): + def update(self, container, context): """ Called by Scene Inventory when look should be updated to current version. 
@@ -106,42 +101,40 @@ class AlembicModelLoader(load.LoaderPlugin): Args: container: object that has look to be updated - representation: (dict): relationship data to get proper + context: (dict): relationship data to get proper representation from DB and persisted data in .json Returns: None """ # Get version from io - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] # get corresponding node model_node = container["node"] # get main variables - version_data = version_doc.get("data", {}) - vname = version_doc.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) - fps = version_data.get("fps") or nuke.root()["fps"].getValue() + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() # prepare data for imprinting - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["source", "author", "fps"] - data_imprint = { - "representation": str(representation["_id"]), + "representation": repre_entity["id"], "frameStart": first, "frameEnd": last, - "version": vname + "version": version_entity["version"] } - for k in add_keys: - data_imprint.update({k: version_data[k]}) + # add additional metadata from the version to imprint to Avalon knob + for k in ["source", "author", "fps"]: + data_imprint[k] = version_attributes[k] # getting file path - file = get_representation_path(representation).replace("\\", "/") + file = get_representation_path(repre_entity).replace("\\", "/") with maintained_selection(): model_node['selected'].setValue(True) @@ -181,29 +174,30 @@ class AlembicModelLoader(load.LoaderPlugin): d.setInput(index, model_node) # color node by correct color by actual version - self.node_version_color(version_doc, model_node) + self.node_version_color(project_name, version_entity, model_node) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) return update_container(model_node, data_imprint) - def node_version_color(self, version, node): + def node_version_color(self, project_name, version_entity, node): """ Coloring a node by correct color by actual version""" - project_name = get_current_project_name() - last_version_doc = get_last_version_by_subset_id( - project_name, version["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = self.node_color else: color_value = "0xd88467ff" node["tile_color"].setValue(int(color_value, 16)) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = nuke.toNode(container['objectName']) diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py index e168c2bac1..c369030b65 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py +++ 
b/client/ayon_core/hosts/nuke/plugins/load/load_ociolook.py @@ -1,16 +1,13 @@ import os import json import secrets + import nuke import six +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id -) from ayon_core.pipeline import ( load, - get_current_project_name, get_representation_path, ) from ayon_core.hosts.nuke.api import ( @@ -23,8 +20,8 @@ from ayon_core.hosts.nuke.api import ( class LoadOcioLookNodes(load.LoaderPlugin): """Loading Ocio look to the nuke.Node graph""" - families = ["ociolook"] - representations = ["*"] + product_types = {"ociolook"} + representations = {"*"} extensions = {"json"} label = "Load OcioLook [nodes]" @@ -47,13 +44,13 @@ class LoadOcioLookNodes(load.LoaderPlugin): Arguments: context (dict): context of version name (str): name of the version - namespace (str): asset name + namespace (str): namespace name data (dict): compulsory attribute > not used Returns: nuke.Node: containerized nuke.Node object """ - namespace = namespace or context['asset']['name'] + namespace = namespace or context["folder"]["name"] suffix = secrets.token_hex(nbytes=4) node_name = "{}_{}_{}".format( name, namespace, suffix) @@ -68,7 +65,11 @@ class LoadOcioLookNodes(load.LoaderPlugin): # renaming group node group_node["name"].setValue(node_name) - self._node_version_color(context["version"], group_node) + self._node_version_color( + context["project"]["name"], + context["version"], + group_node + ) self.log.info( "Loaded lut setup: `{}`".format(group_node["name"].value())) @@ -219,14 +220,12 @@ class LoadOcioLookNodes(load.LoaderPlugin): return group_node - def update(self, container, representation): - - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) + def update(self, container, context): + repre_entity = context["representation"] group_node = container["node"] - filepath = get_representation_path(representation) + filepath = get_representation_path(repre_entity) json_f = self._load_json_data(filepath) @@ -236,13 +235,15 @@ class LoadOcioLookNodes(load.LoaderPlugin): group_node ) - self._node_version_color(version_doc, group_node) + self._node_version_color( + context["project"]["name"], context["version"], group_node + ) self.log.info("Updated lut setup: `{}`".format( group_node["name"].value())) return update_container( - group_node, {"representation": str(representation["_id"])}) + group_node, {"representation": repre_entity["id"]}) def _load_json_data(self, filepath): # getting data from json file with unicode conversion @@ -280,24 +281,23 @@ class LoadOcioLookNodes(load.LoaderPlugin): else: return input - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def remove(self, container): node = nuke.toNode(container['objectName']) with viewer_update_and_undo_stop(): nuke.delete(node) - def _node_version_color(self, version, node): + def _node_version_color(self, project_name, version_entity, node): """ Coloring a node by correct color by actual version""" - project_name = get_current_project_name() - last_version_doc = get_last_version_by_subset_id( - project_name, version["parent"], fields=["_id"] + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} ) # change color of node - if version["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = 
self.current_node_color else: color_value = self.old_node_color diff --git a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py index 1c91e51a09..3e554f9d3b 100644 --- a/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py +++ b/client/ayon_core/hosts/nuke/plugins/load/load_script_precomp.py @@ -1,11 +1,7 @@ import nuke +import ayon_api -from ayon_core.client import ( - get_version_by_id, - get_last_version_by_subset_id, -) from ayon_core.pipeline import ( - get_current_project_name, load, get_representation_path, ) @@ -20,8 +16,8 @@ from ayon_core.hosts.nuke.api import ( class LinkAsGroup(load.LoaderPlugin): """Copy the published file to be pasted at the desired location""" - families = ["workfile", "nukenodes"] - representations = ["*"] + product_types = {"workfile", "nukenodes"} + representations = {"*"} extensions = {"nk"} label = "Load Precomp" @@ -32,34 +28,37 @@ class LinkAsGroup(load.LoaderPlugin): def load(self, context, name, namespace, data): # for k, v in context.items(): # log.info("key: `{}`, value: {}\n".format(k, v)) - version = context['version'] - version_data = version.get("data", {}) + version_entity = context["version"] - vname = version.get("name", None) - first = version_data.get("frameStart", None) - last = version_data.get("frameEnd", None) + version_attributes = version_entity["attrib"] + first = version_attributes.get("frameStart") + last = version_attributes.get("frameEnd") + colorspace = version_attributes.get("colorSpace") - # Fallback to asset name when namespace is None + # Fallback to folder name when namespace is None if namespace is None: - namespace = context['asset']['name'] + namespace = context["folder"]["name"] file = self.filepath_from_context(context).replace("\\", "/") self.log.info("file: {}\n".format(file)) - self.log.info("versionData: {}\n".format(context["version"]["data"])) - - # add additional metadata from the version to imprint to Avalon knob - add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", - "source", "author", "fps"] - data_imprint = { - "startingFrame": first, - "frameStart": first, - "frameEnd": last, - "version": vname + "startingFrame": first, + "frameStart": first, + "frameEnd": last, + "version": version_entity["version"] } - for k in add_keys: - data_imprint.update({k: context["version"]['data'][k]}) + # add additional metadata from the version to imprint to Avalon knob + for k in [ + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "source", + "author", + "fps" + ]: + data_imprint[k] = version_attributes[k] # group context is set to precomp, so back up one level. 
nuke.endGroup() @@ -72,7 +71,6 @@ class LinkAsGroup(load.LoaderPlugin): ) # Set colorspace defined in version data - colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) P["name"].setValue("{}_{}".format(name, namespace)) @@ -104,10 +102,10 @@ class LinkAsGroup(load.LoaderPlugin): loader=self.__class__.__name__, data=data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """Update the Loader's path Nuke automatically tries to reset some variables when changing @@ -117,26 +115,24 @@ class LinkAsGroup(load.LoaderPlugin): """ node = container["node"] - root = get_representation_path(representation).replace("\\", "/") + project_name = context["project"]["name"] + version_entity = context["version"] + repre_entity = context["representation"] + + root = get_representation_path(repre_entity).replace("\\", "/") # Get start frame from version data - project_name = get_current_project_name() - version_doc = get_version_by_id(project_name, representation["parent"]) - last_version_doc = get_last_version_by_subset_id( - project_name, version_doc["parent"], fields=["_id"] - ) - updated_dict = {} - version_data = version_doc["data"] - updated_dict.update({ - "representation": str(representation["_id"]), - "frameEnd": version_data.get("frameEnd"), - "version": version_doc.get("name"), - "colorspace": version_data.get("colorspace"), - "source": version_data.get("source"), - "fps": version_data.get("fps"), - "author": version_data.get("author") - }) + version_attributes = version_entity["attrib"] + updated_dict = { + "representation": repre_entity["id"], + "frameEnd": version_attributes.get("frameEnd"), + "version": version_entity["version"], + "colorspace": version_attributes.get("colorSpace"), + "source": version_attributes.get("source"), + "fps": version_attributes.get("fps"), + "author": version_attributes.get("author") + } # Update the imprinted representation update_container( @@ -146,14 +142,19 @@ class LinkAsGroup(load.LoaderPlugin): node["file"].setValue(root) + last_version_entity = ayon_api.get_last_version_by_product_id( + project_name, version_entity["productId"], fields={"id"} + ) # change color of node - if version_doc["_id"] == last_version_doc["_id"]: + if version_entity["id"] == last_version_entity["id"]: color_value = "0xff0ff0ff" else: color_value = "0xd84f20ff" node["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version_doc.get("name"))) + self.log.info( + "updated to version: {}".format(version_entity["version"]) + ) def remove(self, container): node = container["node"] diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_slate_node.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_slate_node.py index 3baa0cd9b5..ac30bd6051 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_slate_node.py @@ -17,7 +17,8 @@ class CollectSlate(pyblish.api.InstancePlugin): ( n_ for n_ in nuke.allNodes() if "slate" in n_.name().lower() - if not n_["disable"].getValue() + if not n_["disable"].getValue() and + "publish_instance" not in n_.knobs() # Exclude instance nodes. 
), None ) diff --git a/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py b/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py index 58afb2cd1f..745351dc49 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/collect_writes.py @@ -194,7 +194,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, "frameEndHandle": last_frame, }) - # TODO temporarily set stagingDir as persistent for backward # compatibility. This is mainly focused on `renders`folders which # were previously not cleaned up (and could be used in read notes) @@ -269,10 +268,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin, "tags": [] } - frame_start_str = self._get_frame_start_str(first_frame, last_frame) - - representation['frameStart'] = frame_start_str - # set slate frame collected_frames = self._add_slate_frame_to_collected_frames( instance, diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py index 1f5a8c73e1..a1a5acb63b 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_camera.py @@ -1,6 +1,5 @@ import os import math -from pprint import pformat import nuke diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py index 8ac07c641c..8d7a3ec311 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_review_intermediates.py @@ -109,7 +109,7 @@ class ExtractReviewIntermediates(publish.Extractor): if f_task_types and task_type not in f_task_types: continue - # test subsets from filter + # test products from filter if product_names and not any( re.search(p, product_name) for p in product_names ): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py index e44e5686b6..d325684a7c 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_script_save.py @@ -2,10 +2,10 @@ import nuke import pyblish.api -class ExtractScriptSave(pyblish.api.Extractor): +class ExtractScriptSave(pyblish.api.InstancePlugin): """Save current Nuke workfile script""" label = 'Script Save' - order = pyblish.api.Extractor.order - 0.1 + order = pyblish.api.ExtractorOrder - 0.1 hosts = ['nuke'] def process(self, instance): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py index c013da84d2..627888ac92 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -300,6 +300,10 @@ class ExtractSlateFrame(publish.Extractor): self.log.debug( "__ matching_repre: {}".format(pformat(matching_repre))) + data = matching_repre.get("data", {}) + data["slateFrames"] = 1 + matching_repre["data"] = data + self.log.info("Added slate frame to representation files") def add_comment_slate_node(self, instance, node): diff --git a/client/ayon_core/hosts/nuke/plugins/publish/help/validate_asset_context.xml b/client/ayon_core/hosts/nuke/plugins/publish/help/validate_asset_context.xml index d9394ae510..1e7d340a13 100644 --- 
a/client/ayon_core/hosts/nuke/plugins/publish/help/validate_asset_context.xml
+++ b/client/ayon_core/hosts/nuke/plugins/publish/help/validate_asset_context.xml
@@ -1,13 +1,13 @@
-    Shot/Asset name
+    Folder path

-## Publishing to a different asset context
+## Publishing to a different folder context

-There are publish instances present which are publishing into a different asset than your current context.
+Some publish instances are set to publish into a different folder than your current context.

-Usually this is not what you want but there can be cases where you might want to publish into another asset/shot or task.
+Usually this is not what you want, but there are cases where you might intend to publish into another folder/shot or task.

 If that's the case you can disable the validation on the instance to ignore it.
diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py
index 52ef4a58d4..93a30aa438 100644
--- a/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py
+++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_asset_context.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Validate if instance asset is the same as context asset."""
+"""Validate if instance folder is the same as context folder."""
 from __future__ import absolute_import

 import pyblish.api
@@ -17,10 +17,10 @@ class ValidateCorrectAssetContext(
     pyblish.api.InstancePlugin,
     OptionalPyblishPluginMixin
 ):
-    """Validator to check if instance asset context match context asset.
+    """Validator to check if instance folder context matches context folder.

     When working in per-shot style you always publish data in context of
-    current asset (shot). This validator checks if this is so. It is optional
+    current folder (shot). This validator checks if this is so. It is optional
     so it can be disabled when needed.

     Checking `folderPath` and `task` keys.
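The loader plugins earlier in this patch all repeat one refactor: version "documents" fetched through `ayon_core.client` become version entities resolved with `ayon_api`, and an "is this the latest version?" check drives each node's tile color. A minimal sketch of that shared pattern, factored into a hypothetical helper (the entity keys and the `ayon_api.get_last_version_by_product_id` call are taken verbatim from the hunks above; the helper name, its parameters, and a configured `ayon_api` connection are assumptions):

```python
import ayon_api


def version_tile_color(project_name, version_entity, up_to_date, outdated):
    """Pick a tile color depending on whether a version is the latest.

    Hypothetical helper mirroring the pattern repeated in the loaders
    above; assumes ``version_entity`` carries the "id" and "productId"
    keys that AYON version entities provide.
    """
    last_version_entity = ayon_api.get_last_version_by_product_id(
        project_name, version_entity["productId"], fields={"id"}
    )
    # Latest version gets the plugin's highlight color,
    # an outdated version gets the shared warning color.
    if version_entity["id"] == last_version_entity["id"]:
        return int(up_to_date, 16)
    return int(outdated, 16)


# Example with the colors LoadClip uses above:
# read_node["tile_color"].setValue(
#     version_tile_color(
#         project_name, version_entity, "0x4ecd25ff", "0xd84f20ff"
#     )
# )
```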
diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py index 281e172788..8bcde9609d 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_knobs.py @@ -1,3 +1,5 @@ +import json + import nuke import six import pyblish.api diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py index 852267f68c..76ac7e97ad 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -1,6 +1,6 @@ -import os import pyblish.api import clique + from ayon_core.pipeline import PublishXmlValidationError from ayon_core.pipeline.publish import get_errored_instances_from_context diff --git a/client/ayon_core/hosts/nuke/plugins/publish/validate_script_attributes.py b/client/ayon_core/hosts/nuke/plugins/publish/validate_script_attributes.py index c4974817bd..2bd2034079 100644 --- a/client/ayon_core/hosts/nuke/plugins/publish/validate_script_attributes.py +++ b/client/ayon_core/hosts/nuke/plugins/publish/validate_script_attributes.py @@ -29,7 +29,7 @@ class ValidateScriptAttributes( script_data = deepcopy(instance.context.data["scriptData"]) - asset = instance.data["assetEntity"] + src_folder_attributes = instance.data["folderEntity"]["attrib"] # These attributes will be checked attributes = [ @@ -42,32 +42,32 @@ class ValidateScriptAttributes( "handleEnd" ] - # get only defined attributes from asset data - asset_attributes = { - attr: asset["data"][attr] + # get only defined attributes from folder data + folder_attributes = { + attr: src_folder_attributes[attr] for attr in attributes - if attr in asset["data"] + if attr in src_folder_attributes } # fix frame values to include handles - asset_attributes["fps"] = float("{0:.4f}".format( - asset_attributes["fps"])) + folder_attributes["fps"] = float("{0:.4f}".format( + folder_attributes["fps"])) script_data["fps"] = float("{0:.4f}".format( script_data["fps"])) - # Compare asset's values Nukescript X Database + # Compare folder's values Nukescript X Database not_matching = [] for attr in attributes: self.log.debug( - "Asset vs Script attribute \"{}\": {}, {}".format( + "Folder vs Script attribute \"{}\": {}, {}".format( attr, - asset_attributes[attr], + folder_attributes[attr], script_data[attr] ) ) - if asset_attributes[attr] != script_data[attr]: + if folder_attributes[attr] != script_data[attr]: not_matching.append({ "name": attr, - "expected": asset_attributes[attr], + "expected": folder_attributes[attr], "actual": script_data[attr] }) diff --git a/client/ayon_core/hosts/nuke/startup/custom_write_node.py b/client/ayon_core/hosts/nuke/startup/custom_write_node.py index 84e99f34c4..f119e69919 100644 --- a/client/ayon_core/hosts/nuke/startup/custom_write_node.py +++ b/client/ayon_core/hosts/nuke/startup/custom_write_node.py @@ -1,8 +1,8 @@ -""" OpenPype custom script for setting up write nodes for non-publish """ +""" AYON custom script for setting up write nodes for non-publish """ import os import nuke import nukescripts -from ayon_core.pipeline import Anatomy +from ayon_core.pipeline import Anatomy, get_current_project_name from ayon_core.hosts.nuke.api.lib import ( set_node_knobs_from_settings, get_nuke_imageio_settings @@ -102,13 +102,9 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): for 
knob in ext_knob_list: ext = knob["value"] - anatomy = Anatomy() + anatomy = Anatomy(get_current_project_name()) - frame_padding = int( - anatomy.templates["render"].get( - "frame_padding" - ) - ) + frame_padding = anatomy.templates_obj.frame_padding for write_node in write_selected_nodes: # data for mapping the path # TODO add more fill data @@ -145,8 +141,8 @@ class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): for setting in settings: # TODO change 'subsets' to 'product_names' in settings - for subset in setting["subsets"]: - preset_name.append(subset) + for product_name in setting["subsets"]: + preset_name.append(product_name) return preset_name, knobs_nodes diff --git a/client/ayon_core/hosts/nuke/startup/frame_setting_for_read_nodes.py b/client/ayon_core/hosts/nuke/startup/frame_setting_for_read_nodes.py index f0cbabe20f..3e1430c3b1 100644 --- a/client/ayon_core/hosts/nuke/startup/frame_setting_for_read_nodes.py +++ b/client/ayon_core/hosts/nuke/startup/frame_setting_for_read_nodes.py @@ -1,4 +1,4 @@ -""" OpenPype custom script for resetting read nodes start frame values """ +""" AYON custom script for resetting read nodes start frame values """ import nuke import nukescripts diff --git a/client/ayon_core/hosts/photoshop/__init__.py b/client/ayon_core/hosts/photoshop/__init__.py index 773f73d624..cf21b7df75 100644 --- a/client/ayon_core/hosts/photoshop/__init__.py +++ b/client/ayon_core/hosts/photoshop/__init__.py @@ -1,10 +1,12 @@ from .addon import ( + PHOTOSHOP_ADDON_ROOT, PhotoshopAddon, - PHOTOSHOP_HOST_DIR, + get_launch_script_path, ) __all__ = ( + "PHOTOSHOP_ADDON_ROOT", "PhotoshopAddon", - "PHOTOSHOP_HOST_DIR", + "get_launch_script_path", ) diff --git a/client/ayon_core/hosts/photoshop/addon.py b/client/ayon_core/hosts/photoshop/addon.py index 3016912960..65fe6a7cd1 100644 --- a/client/ayon_core/hosts/photoshop/addon.py +++ b/client/ayon_core/hosts/photoshop/addon.py @@ -1,7 +1,7 @@ import os from ayon_core.addon import AYONAddon, IHostAddon -PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) +PHOTOSHOP_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__)) class PhotoshopAddon(AYONAddon, IHostAddon): @@ -20,3 +20,17 @@ class PhotoshopAddon(AYONAddon, IHostAddon): def get_workfile_extensions(self): return [".psd", ".psb"] + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(PHOTOSHOP_ADDON_ROOT, "hooks") + ] + + +def get_launch_script_path(): + return os.path.join( + PHOTOSHOP_ADDON_ROOT, "api", "launch_script.py" + ) + diff --git a/client/ayon_core/hosts/photoshop/api/README.md b/client/ayon_core/hosts/photoshop/api/README.md index 51a9b9ad5e..b391131a42 100644 --- a/client/ayon_core/hosts/photoshop/api/README.md +++ b/client/ayon_core/hosts/photoshop/api/README.md @@ -207,7 +207,7 @@ class ImageLoader(load.LoaderPlugin): """ families = ["image"] - representations = ["*"] + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): path = self.filepath_from_context(context) @@ -224,23 +224,23 @@ class ImageLoader(load.LoaderPlugin): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): layer = container.pop("layer") - + repre_entity = context["representation"] with photoshop.maintained_selection(): stub.replace_smart_object( - layer, get_representation_path(representation) + layer, get_representation_path(repre_entity) ) stub.imprint( - layer, {"representation": str(representation["_id"])} + layer, 
{"representation": repre_entity["id"]} ) def remove(self, container): container["layer"].Delete() - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) ``` For easier debugging of Javascript: https://community.adobe.com/t5/download-install/adobe-extension-debuger-problem/td-p/10911704?page=1 diff --git a/client/ayon_core/hosts/photoshop/api/extension/host/index.jsx b/client/ayon_core/hosts/photoshop/api/extension/host/index.jsx index e2711fb960..b697ee65ab 100644 --- a/client/ayon_core/hosts/photoshop/api/extension/host/index.jsx +++ b/client/ayon_core/hosts/photoshop/api/extension/host/index.jsx @@ -213,7 +213,7 @@ function getActiveDocumentFullName(){ function imprint(payload){ /** * Sets headline content of current document with metadata. Stores - * information about assets created through Avalon. + * information about assets created through AYON. * Content accessible in PS through File > File Info * **/ diff --git a/client/ayon_core/hosts/photoshop/api/launch_logic.py b/client/ayon_core/hosts/photoshop/api/launch_logic.py index adf90be311..c388f93044 100644 --- a/client/ayon_core/hosts/photoshop/api/launch_logic.py +++ b/client/ayon_core/hosts/photoshop/api/launch_logic.py @@ -8,9 +8,10 @@ from wsrpc_aiohttp import ( WebSocketAsync ) +import ayon_api from qtpy import QtCore -from ayon_core.lib import Logger, StringTemplate +from ayon_core.lib import Logger from ayon_core.pipeline import ( registered_host, Anatomy, @@ -23,7 +24,6 @@ from ayon_core.pipeline.template_data import get_template_data_with_names from ayon_core.tools.utils import host_tools from ayon_core.tools.adobe_webserver.app import WebServerTool from ayon_core.pipeline.context_tools import change_current_context -from ayon_core.client import get_asset_by_name from .ws_stub import PhotoshopServerStub @@ -318,25 +318,28 @@ class PhotoshopRoute(WebSocketRoute): # This method calls function on the client side # client functions - async def set_context(self, project, asset, task): + async def set_context(self, project, folder, task): """ - Sets 'project' and 'asset' to envs, eg. setting context. + Sets 'project' and 'folder' to envs, eg. setting context. Opens last workile from that context if exists. Args: project (str) - asset (str) + folder (str) task (str """ log.info("Setting context change") - log.info(f"project {project} asset {asset} task {task}") + log.info(f"project {project} folder {folder} task {task}") - asset_doc = get_asset_by_name(project, asset) - change_current_context(asset_doc, task) + folder_entity = ayon_api.get_folder_by_path(project, folder) + task_entity = ayon_api.get_task_by_name( + project, folder_entity["id"], task + ) + change_current_context(folder_entity, task_entity) last_workfile_path = self._get_last_workfile_path(project, - asset, + folder, task) if last_workfile_path and os.path.exists(last_workfile_path): ProcessLauncher.execute_in_main_thread( @@ -372,32 +375,30 @@ class PhotoshopRoute(WebSocketRoute): # Required return statement. 
return "nothing" - def _get_last_workfile_path(self, project_name, asset_name, task_name): + def _get_last_workfile_path(self, project_name, folder_path, task_name): """Returns last workfile path if exists""" host = registered_host() host_name = "photoshop" template_key = get_workfile_template_key_from_context( - asset_name, + project_name, + folder_path, task_name, host_name, - project_name=project_name ) anatomy = Anatomy(project_name) data = get_template_data_with_names( - project_name, asset_name, task_name, host_name + project_name, folder_path, task_name, host_name ) data["root"] = anatomy.roots - file_template = anatomy.templates[template_key]["file"] + work_template = anatomy.get_template_item("work", template_key) # Define saving file extension extensions = host.get_workfile_extensions() - folder_template = anatomy.templates[template_key]["folder"] - work_root = StringTemplate.format_strict_template( - folder_template, data - ) + work_root = work_template["directory"].format_strict(data) + file_template = work_template["file"].template last_workfile_path = get_last_workfile( work_root, file_template, data, extensions, True ) diff --git a/client/ayon_core/hosts/photoshop/api/launch_script.py b/client/ayon_core/hosts/photoshop/api/launch_script.py new file mode 100644 index 0000000000..bb4de80086 --- /dev/null +++ b/client/ayon_core/hosts/photoshop/api/launch_script.py @@ -0,0 +1,93 @@ +"""Script wraps launch mechanism of Photoshop implementations. + +Arguments passed to the script are passed to launch function in host +implementation. In all cases requires host app executable and may contain +workfile or others. +""" + +import os +import sys + +from ayon_core.hosts.photoshop.api.lib import main as host_main + +# Get current file to locate start point of sys.argv +CURRENT_FILE = os.path.abspath(__file__) + + +def show_error_messagebox(title, message, detail_message=None): + """Function will show message and process ends after closing it.""" + from qtpy import QtWidgets, QtCore + from ayon_core import style + + app = QtWidgets.QApplication([]) + app.setStyleSheet(style.load_stylesheet()) + + msgbox = QtWidgets.QMessageBox() + msgbox.setWindowTitle(title) + msgbox.setText(message) + + if detail_message: + msgbox.setDetailedText(detail_message) + + msgbox.setWindowModality(QtCore.Qt.ApplicationModal) + msgbox.show() + + sys.exit(app.exec_()) + + +def on_invalid_args(script_not_found): + """Show to user message box saying that something went wrong. + + Tell user that arguments to launch implementation are invalid with + arguments details. + + Args: + script_not_found (bool): Use different message based on this value. + """ + + title = "Invalid arguments" + joined_args = ", ".join("\"{}\"".format(arg) for arg in sys.argv) + if script_not_found: + submsg = "Where couldn't find script path:\n\"{}\"" + else: + submsg = "Expected Host executable after script path:\n\"{}\"" + + message = "BUG: Got invalid arguments so can't launch Host application." + detail_message = "Process was launched with arguments:\n{}\n\n{}".format( + joined_args, + submsg.format(CURRENT_FILE) + ) + + show_error_messagebox(title, message, detail_message) + + +def main(argv): + # Modify current file path to find match in sys.argv which may be different + # on windows (different letter cases and slashes). 
+ modified_current_file = CURRENT_FILE.replace("\\", "/").lower() + + # Create a copy of sys argv + sys_args = list(argv) + after_script_idx = None + # Find script path in sys.argv to know index of argv where host + # executable should be. + for idx, item in enumerate(sys_args): + if item.replace("\\", "/").lower() == modified_current_file: + after_script_idx = idx + 1 + break + + # Validate that there is at least one argument after script path + launch_args = None + if after_script_idx is not None: + launch_args = sys_args[after_script_idx:] + + if launch_args: + # Launch host implementation + host_main(*launch_args) + else: + # Show message box + on_invalid_args(after_script_idx is None) + + +if __name__ == "__main__": + main(sys.argv) diff --git a/client/ayon_core/hosts/photoshop/api/pipeline.py b/client/ayon_core/hosts/photoshop/api/pipeline.py index 4e9a861220..27cfa5a7b5 100644 --- a/client/ayon_core/hosts/photoshop/api/pipeline.py +++ b/client/ayon_core/hosts/photoshop/api/pipeline.py @@ -21,14 +21,14 @@ from ayon_core.host import ( ) from ayon_core.pipeline.load import any_outdated_containers -from ayon_core.hosts.photoshop import PHOTOSHOP_HOST_DIR +from ayon_core.hosts.photoshop import PHOTOSHOP_ADDON_ROOT from ayon_core.tools.utils import get_ayon_qt_app from . import lib log = Logger.get_logger(__name__) -PLUGINS_DIR = os.path.join(PHOTOSHOP_HOST_DIR, "plugins") +PLUGINS_DIR = os.path.join(PHOTOSHOP_ADDON_ROOT, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") LOAD_PATH = os.path.join(PLUGINS_DIR, "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "create") @@ -260,7 +260,7 @@ def containerise( "name": name, "namespace": namespace, "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], "members": [str(layer.id)] } stub = lib.stub() diff --git a/client/ayon_core/hosts/photoshop/api/plugin.py b/client/ayon_core/hosts/photoshop/api/plugin.py index d4eb38300f..c11a206834 100644 --- a/client/ayon_core/hosts/photoshop/api/plugin.py +++ b/client/ayon_core/hosts/photoshop/api/plugin.py @@ -4,21 +4,21 @@ from ayon_core.pipeline import LoaderPlugin from .launch_logic import stub -def get_unique_layer_name(layers, asset_name, product_name): +def get_unique_layer_name(layers, container_name, product_name): """Prepare unique layer name. - Gets all layer names and if '_' is present, + Gets all layer names and if '_' is present, it adds suffix '1', or increases the suffix by 1. Args: layers (list) of dict with layers info (name, id etc.) - asset_name (str): + container_name (str): product_name (str): Returns: str: name_00X (without version) """ - name = "{}_{}".format(asset_name, product_name) + name = "{}_{}".format(container_name, product_name) names = {} for layer in layers: layer_name = re.sub(r'_\d{3}$', '', layer.name) diff --git a/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py new file mode 100644 index 0000000000..70f8fc730f --- /dev/null +++ b/client/ayon_core/hosts/photoshop/hooks/pre_launch_args.py @@ -0,0 +1,88 @@ +import os +import platform +import subprocess + +from ayon_core.lib import ( + get_ayon_launcher_args, + is_using_ayon_console, +) +from ayon_applications import PreLaunchHook, LaunchTypes +from ayon_core.hosts.photoshop import get_launch_script_path + + +def get_launch_kwargs(kwargs): + """Explicit setting of kwargs for Popen for Photoshop. 
+ + Expected behavior + - ayon_console opens window with logs + - ayon has stdout/stderr available for capturing + + Args: + kwargs (Union[dict, None]): Current kwargs or None. + + """ + if kwargs is None: + kwargs = {} + + if platform.system().lower() != "windows": + return kwargs + + if not is_using_ayon_console(): + kwargs.update({ + "creationflags": subprocess.CREATE_NEW_CONSOLE + }) + else: + kwargs.update({ + "creationflags": subprocess.CREATE_NO_WINDOW, + "stdout": subprocess.DEVNULL, + "stderr": subprocess.DEVNULL + }) + return kwargs + + +class PhotoshopPrelaunchHook(PreLaunchHook): + """Launch arguments preparation. + + Hook adds python executable and script path to Photoshop implementation + before Photoshop executable and adds last workfile path to launch arguments. + + Existence of last workfile is checked. If workfile does not exist, it tries + to copy templated workfile from predefined path. + """ + app_groups = {"photoshop"} + + order = 20 + launch_types = {LaunchTypes.local} + + def execute(self): + # Pop executable + executable_path = self.launch_context.launch_args.pop(0) + + # Pop rest of launch arguments - There should not be other arguments! + remainders = [] + while self.launch_context.launch_args: + remainders.append(self.launch_context.launch_args.pop(0)) + + script_path = get_launch_script_path() + + new_launch_args = get_ayon_launcher_args( + "run", script_path, executable_path + ) + # Add workfile path if it exists + workfile_path = self.data["last_workfile_path"] + if ( + self.data.get("start_last_workfile") + and workfile_path + and os.path.exists(workfile_path) + ): + new_launch_args.append(workfile_path) + + # Append as whole list as these arguments should not be separated + self.launch_context.launch_args.append(new_launch_args) + + if remainders: + self.launch_context.launch_args.extend(remainders) + + self.launch_context.kwargs = get_launch_kwargs( + self.launch_context.kwargs + ) diff --git a/client/ayon_core/hosts/photoshop/lib.py b/client/ayon_core/hosts/photoshop/lib.py index 6d5be48bc2..dd227c5d81 100644 --- a/client/ayon_core/hosts/photoshop/lib.py +++ b/client/ayon_core/hosts/photoshop/lib.py @@ -1,7 +1,8 @@ import re +import ayon_api + import ayon_core.hosts.photoshop.api as api -from ayon_core.client import get_asset_by_name from ayon_core.lib import prepare_template_data from ayon_core.pipeline import ( AutoCreator, @@ -40,33 +41,38 @@ class PSAutoCreator(AutoCreator): context = self.create_context project_name = context.get_current_project_name() - asset_name = context.get_current_asset_name() + folder_path = context.get_current_folder_path() task_name = context.get_current_task_name() host_name = context.host_name if existing_instance is None: - existing_instance_asset = None + existing_instance_folder = None else: - existing_instance_asset = existing_instance["folderPath"] + existing_instance_folder = existing_instance["folderPath"] if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant } data.update(self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + 
task_entity, self.default_variant, host_name, None @@ -83,18 +89,23 @@ class PSAutoCreator(AutoCreator): new_instance.data_to_store()) elif ( - existing_instance_asset != asset_name + existing_instance_folder != folder_path or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py b/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py index 11bf92d5fb..a3bc77c640 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_flatten_image.py @@ -1,11 +1,9 @@ -from ayon_core.pipeline import CreatedInstance +import ayon_api -from ayon_core.lib import BoolDef import ayon_core.hosts.photoshop.api as api from ayon_core.hosts.photoshop.lib import PSAutoCreator, clean_product_name -from ayon_core.pipeline.create import get_product_name -from ayon_core.lib import prepare_template_data -from ayon_core.client import get_asset_by_name +from ayon_core.lib import BoolDef, prepare_template_data +from ayon_core.pipeline.create import get_product_name, CreatedInstance class AutoImageCreator(PSAutoCreator): @@ -32,27 +30,29 @@ class AutoImageCreator(PSAutoCreator): context = self.create_context project_name = context.get_current_project_name() - asset_name = context.get_current_asset_name() + folder_path = context.get_current_folder_path() task_name = context.get_current_task_name() host_name = context.host_name - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) - if existing_instance is None: - existing_instance_asset = None - else: - existing_instance_asset = existing_instance["folderPath"] + existing_folder_path = None + if existing_instance is not None: + existing_folder_path = existing_instance["folderPath"] if existing_instance is None: product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, } @@ -70,17 +70,17 @@ class AutoImageCreator(PSAutoCreator): new_instance.data_to_store()) elif ( # existing instance from different context - existing_instance_asset != asset_name + existing_folder_path != folder_path or existing_instance["task"] != task_name ): product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name @@ -128,19 +128,26 @@ class AutoImageCreator(PSAutoCreator): def get_product_name( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, 
host_name=None, instance=None ): if host_name is None: host_name = self.create_context.host_name + + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + dynamic_data = prepare_template_data({"layer": "{layer}"}) + product_name = get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, self.product_type, variant, diff --git a/client/ayon_core/hosts/photoshop/plugins/create/create_image.py b/client/ayon_core/hosts/photoshop/plugins/create/create_image.py index 8806aad33c..26f2469844 100644 --- a/client/ayon_core/hosts/photoshop/plugins/create/create_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/create/create_image.py @@ -247,8 +247,8 @@ class ImageCreator(Creator): def get_dynamic_data( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, instance diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py index 0fa6bca901..d71067615e 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image.py @@ -11,14 +11,14 @@ class ImageLoader(photoshop.PhotoshopLoader): Stores the imported asset in a container named after the asset. """ - families = ["image", "render"] - representations = ["*"] + product_types = {"image", "render"} + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() layer_name = get_unique_layer_name( stub.get_layers(), - context["asset"]["name"], + context["folder"]["name"], name ) with photoshop.maintained_selection(): @@ -36,33 +36,35 @@ class ImageLoader(photoshop.PhotoshopLoader): self.__class__.__name__ ) - def update(self, container, representation): + def update(self, container, context): """ Switch asset or change version """ stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + repre_entity = context["representation"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - layer_name = "{}_{}".format(context["asset"], context["subset"]) + layer_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != layer_name: layer_name = get_unique_layer_name( - stub.get_layers(), context["asset"], context["subset"] + stub.get_layers(), folder_name, product_name ) else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) with photoshop.maintained_selection(): stub.replace_smart_object( layer, path, layer_name ) stub.imprint( - layer.id, {"representation": str(representation["_id"])} + layer.id, {"representation": repre_entity["id"]} ) def remove(self, container): @@ -77,8 +79,8 @@ class ImageLoader(photoshop.PhotoshopLoader): stub.imprint(layer.id, {}) stub.delete_layer(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def import_layer(self, file_name, layer_name, stub): return stub.import_smart_object(file_name, layer_name) diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py index 
06ac70041e..dd14543f3e 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_image_from_sequence.py @@ -19,13 +19,13 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): This loader will be triggered multiple times, but selected name will match only to proper path. - Loader doesnt do containerization as there is currently no data model + Loader doesn't do containerization as there is currently no data model of 'frame of rendered files' (only rendered sequence), update would be difficult. """ - families = ["render"] - representations = ["*"] + product_types = {"render"} + representations = {"*"} options = [] def load(self, context, name=None, namespace=None, data=None): @@ -40,7 +40,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): stub = self.get_stub() layer_name = get_unique_layer_name( - stub.get_layers(), context["asset"]["name"], name + stub.get_layers(), context["folder"]["name"], name ) with photoshop.maintained_selection(): @@ -86,7 +86,7 @@ class ImageFromSequenceLoader(photoshop.PhotoshopLoader): ) ] - def update(self, container, representation): + def update(self, container, context): """No update possible, not containerized.""" pass diff --git a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py index e2fec039d0..b563faff82 100644 --- a/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py +++ b/client/ayon_core/hosts/photoshop/plugins/load/load_reference.py @@ -14,13 +14,13 @@ class ReferenceLoader(photoshop.PhotoshopLoader): "Cannot write to closing transport", possible refactor. """ - families = ["image", "render"] - representations = ["*"] + product_types = {"image", "render"} + representations = {"*"} def load(self, context, name=None, namespace=None, data=None): stub = self.get_stub() layer_name = get_unique_layer_name( - stub.get_layers(), context["asset"]["name"], name + stub.get_layers(), context["folder"]["name"], name ) with photoshop.maintained_selection(): path = self.filepath_from_context(context) @@ -37,32 +37,34 @@ class ReferenceLoader(photoshop.PhotoshopLoader): self.__class__.__name__ ) - def update(self, container, representation): - """ Switch asset or change version """ + def update(self, container, context): + """ Switch asset or change version.""" stub = self.get_stub() layer = container.pop("layer") - context = representation.get("context", {}) + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + repre_entity = context["representation"] namespace_from_container = re.sub(r'_\d{3}$', '', container["namespace"]) - layer_name = "{}_{}".format(context["asset"], context["subset"]) + layer_name = "{}_{}".format(folder_name, product_name) # switching assets if namespace_from_container != layer_name: layer_name = get_unique_layer_name( - stub.get_layers(), context["asset"], context["subset"] + stub.get_layers(), folder_name, product_name ) else: # switching version - keep same name layer_name = container["namespace"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) with photoshop.maintained_selection(): stub.replace_smart_object( layer, path, layer_name ) stub.imprint( - layer.id, {"representation": str(representation["_id"])} + layer.id, {"representation": repre_entity["id"]} ) def remove(self, container): @@ -76,8 +78,8 @@ class ReferenceLoader(photoshop.PhotoshopLoader): 
stub.imprint(layer.id, {}) stub.delete_layer(layer.id) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) def import_layer(self, file_name, layer_name, stub): return stub.import_smart_object( diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py b/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py index 6f86d98580..68c3b5b249 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/closePS.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- """Close PS after publish. For Webpublishing only.""" -import os - import pyblish.api from ayon_core.hosts.photoshop import api as photoshop diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py index 7773b444d2..adbe02eb74 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image.py @@ -1,6 +1,5 @@ import pyblish.api -from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop from ayon_core.pipeline.create import get_product_name @@ -10,7 +9,6 @@ class CollectAutoImage(pyblish.api.ContextPlugin): """ label = "Collect Auto Image" - order = pyblish.api.CollectorOrder hosts = ["photoshop"] order = pyblish.api.CollectorOrder + 0.2 @@ -25,10 +23,13 @@ class CollectAutoImage(pyblish.api.ContextPlugin): project_name = context.data["projectName"] proj_settings = context.data["project_settings"] - task_name = context.data["task"] host_name = context.data["hostName"] - asset_doc = context.data["assetEntity"] - folder_path = get_asset_name_identifier(asset_doc) + folder_entity = context.data["folderEntity"] + task_entity = context.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] auto_creator = proj_settings.get( "photoshop", {}).get( @@ -81,15 +82,15 @@ class CollectAutoImage(pyblish.api.ContextPlugin): product_name = get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, product_type, variant, ) instance = context.create_instance(product_name) - instance.data["folderPath"] = folder_path + instance.data["folderPath"] = folder_entity["path"] instance.data["productType"] = product_type instance.data["productName"] = product_name instance.data["ids"] = publishable_ids diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image_refresh.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image_refresh.py index 0585f4f226..7a5f297c89 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image_refresh.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_image_refresh.py @@ -8,7 +8,6 @@ class CollectAutoImageRefresh(pyblish.api.ContextPlugin): """ label = "Collect Auto Image Refresh" - order = pyblish.api.CollectorOrder hosts = ["photoshop"] order = pyblish.api.CollectorOrder + 0.2 diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py index 14f2f23985..d7267d253a 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_review.py @@ -7,7 
+7,6 @@ Provides: """ import pyblish.api -from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop from ayon_core.pipeline.create import get_product_name @@ -63,16 +62,18 @@ class CollectAutoReview(pyblish.api.ContextPlugin): project_name = context.data["projectName"] proj_settings = context.data["project_settings"] - task_name = context.data["task"] host_name = context.data["hostName"] - asset_doc = context.data["assetEntity"] - - folder_path = get_asset_name_identifier(asset_doc) + folder_entity = context.data["folderEntity"] + task_entity = context.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] product_name = get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, product_type, variant, @@ -88,7 +89,7 @@ class CollectAutoReview(pyblish.api.ContextPlugin): "family": product_type, "families": [product_type], "representations": [], - "folderPath": folder_path, + "folderPath": folder_entity["path"], "publish": self.publish }) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py index 0b12195603..af74c76a15 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_auto_workfile.py @@ -1,7 +1,6 @@ import os import pyblish.api -from ayon_core.client import get_asset_name_identifier from ayon_core.hosts.photoshop import api as photoshop from ayon_core.pipeline.create import get_product_name @@ -69,13 +68,17 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): task_name = context.data["task"] host_name = context.data["hostName"] - asset_doc = context.data["assetEntity"] + folder_entity = context.data["folderEntity"] + task_entity = context.data["taskEntity"] + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] - folder_path = get_asset_name_identifier(asset_doc) product_name = get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, product_type, variant, @@ -92,7 +95,7 @@ class CollectAutoWorkfile(pyblish.api.ContextPlugin): "family": product_type, "families": [product_type], "representations": [], - "folderPath": folder_path + "folderPath": folder_entity["path"] }) # creating representation diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py index a32b5f8fa5..c43a957576 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -18,7 +18,7 @@ import os import pyblish.api from openpype_modules.webpublisher.lib import ( - get_batch_asset_task_info, + get_batch_context_info, parse_json ) from ayon_core.lib import is_in_tests @@ -64,14 +64,14 @@ class CollectBatchData(pyblish.api.ContextPlugin): context.data["batchDir"] = batch_dir context.data["batchData"] = batch_data - asset_name, task_name, task_type = get_batch_asset_task_info( + folder_path, task_name, task_type = get_batch_context_info( batch_data["context"] ) - os.environ["AYON_FOLDER_PATH"] = asset_name + os.environ["AYON_FOLDER_PATH"] = folder_path os.environ["AYON_TASK_NAME"] = task_name - context.data["folderPath"] = asset_name + context.data["folderPath"] = folder_path 
context.data["task"] = task_name context.data["taskType"] = task_type context.data["project_name"] = project_name diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index f11ba4383a..e8f7c7e3df 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -57,7 +57,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): existing_product_names = self._get_existing_product_names(context) # from CollectBatchData - asset_name = context.data["folderPath"] + folder_path = context.data["folderPath"] task_name = context.data["task"] variant = context.data["variant"] project_name = context.data["projectEntity"]["name"] @@ -120,7 +120,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): context, layer, resolved_product_type, - asset_name, + folder_path, product_name, task_name ) @@ -146,7 +146,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): context, first_layer, product_type, - asset_name, + folder_path, product_name, task_name ) diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py b/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py index 53b2503ba2..84c9fa3e62 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/collect_published_version.py @@ -9,15 +9,15 @@ product is used instead. This plugin runs only in remote publishing (eg. Webpublisher). Requires: - context.data["assetEntity"] + context.data["folderEntity"] Provides: context["version"] - incremented latest published workfile version """ import pyblish.api +import ayon_api -from ayon_core.client import get_last_version_by_subset_name from ayon_core.pipeline.version_start import get_versioning_start @@ -42,15 +42,14 @@ class CollectPublishedVersion(pyblish.api.ContextPlugin): return project_name = context.data["projectName"] - asset_doc = context.data["assetEntity"] - asset_id = asset_doc["_id"] + folder_id = context.data["folderEntity"]["id"] - version_doc = get_last_version_by_subset_name( - project_name, workfile_product_name, asset_id + version_entity = ayon_api.get_last_version_by_product_name( + project_name, workfile_product_name, folder_id ) - if version_doc: - version_int = int(version_doc["name"]) + 1 + if version_entity: + version_int = int(version_entity["version"]) + 1 else: version_int = get_versioning_start( project_name, diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_instance_asset.xml b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_instance_asset.xml index e05ac92182..c033f922c6 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_instance_asset.xml +++ b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_instance_asset.xml @@ -1,9 +1,9 @@ -Asset does not match +Folder does not match -## Collected asset name is not same as in context +## Collected folder path is not same as in context {msg} ### How to repair? @@ -11,10 +11,10 @@ Refresh Publish afterwards (circle arrow at the bottom right). 
If that's not the correct value, close workfile and reopen via Workfiles to get - proper context asset name OR disable this validator and publish again + proper context folder path OR disable this validator and publish again if you are publishing to different context deliberately. - (Context means combination of project, asset name and task name.) + (Context means combination of project, folder path and task name.) \ No newline at end of file diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml index 28c2c2c773..28c2329c8a 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml +++ b/client/ayon_core/hosts/photoshop/plugins/publish/help/validate_naming.xml @@ -1,11 +1,11 @@ -Subset name +Product name ## Invalid product or layer name -Subset or layer name cannot contain specific characters (spaces etc) which could cause issue when product name is used in a published file name. +Product or layer name cannot contain specific characters (spaces etc.) which could cause issues when product name is used in a published file name. {msg} ### How to repair? diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py b/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py index 67a7303316..c3a6822f32 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/validate_instance_asset.py @@ -1,6 +1,6 @@ import pyblish.api -from ayon_core.pipeline import get_current_asset_name +from ayon_core.pipeline import get_current_folder_path from ayon_core.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -9,8 +9,8 @@ from ayon_core.pipeline.publish import ( from ayon_core.hosts.photoshop import api as photoshop -class ValidateInstanceAssetRepair(pyblish.api.Action): - """Repair the instance asset.""" +class ValidateInstanceFolderRepair(pyblish.api.Action): + """Repair the instance folder.""" label = "Repair" icon = "wrench" @@ -21,50 +21,54 @@ # Get the errored instances failed = [] for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): + if ( + result["error"] is not None + and result["instance"] is not None + and result["instance"] not in failed + ): failed.append(result["instance"]) # Apply pyblish.logic to get the instances for the plug-in instances = pyblish.api.instances_by_plugin(failed, plugin) stub = photoshop.stub() - current_asset_name = get_current_asset_name() + current_folder_path = get_current_folder_path() for instance in instances: data = stub.read(instance[0]) - data["folderPath"] = current_asset_name + data["folderPath"] = current_folder_path stub.imprint(instance[0], data) class ValidateInstanceAsset(OptionalPyblishPluginMixin, pyblish.api.InstancePlugin): - """Validate the instance asset is the current selected context asset. + """Validate the instance folder is the current selected context folder. As it might happen that multiple workfiles are opened, switching between them would mess with selected context. - In that case outputs might be output under wrong asset! + In that case outputs might be output under the wrong folder!
- Repair action will use Context asset value (from Workfiles or Launcher) + Repair action will use Context folder value (from Workfiles or Launcher) Closing and reopening with Workfiles will refresh Context value. """ - label = "Validate Instance Asset" + label = "Validate Instance Folder" hosts = ["photoshop"] optional = True - actions = [ValidateInstanceAssetRepair] + actions = [ValidateInstanceFolderRepair] order = ValidateContentsOrder def process(self, instance): - instance_asset = instance.data["folderPath"] - current_asset = get_current_asset_name() + instance_folder_path = instance.data["folderPath"] + current_folder_path = get_current_folder_path() - if instance_asset != current_asset: + if instance_folder_path != current_folder_path: msg = ( - f"Instance asset {instance_asset} is not the same " - f"as current context {current_asset}." + f"Instance folder {instance_folder_path} is not the same" + f" as current context {current_folder_path}." ) repair_msg = ( - f"Repair with 'Repair' button to use '{current_asset}'.\n" + "Repair with 'Repair' button" + f" to use '{current_folder_path}'.\n" ) formatting_data = {"msg": msg, "repair_msg": repair_msg} diff --git a/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py b/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py index ce940c47ce..13c6a54fd2 100644 --- a/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py +++ b/client/ayon_core/hosts/photoshop/plugins/publish/validate_naming.py @@ -11,7 +11,7 @@ class ValidateNamingRepair(pyblish.api.Action): - """Repair the instance asset.""" + """Repair invalid product and layer names.""" label = "Repair" icon = "wrench" @@ -22,8 +22,11 @@ # Get the errored instances failed = [] for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): + if ( + result["error"] is not None + and result["instance"] is not None + and result["instance"] not in failed + ): failed.append(result["instance"]) invalid_chars, replace_char = plugin.get_replace_chars() diff --git a/client/ayon_core/hosts/resolve/README.markdown b/client/ayon_core/hosts/resolve/README.markdown index a8bb071e7e..064e791f65 100644 --- a/client/ayon_core/hosts/resolve/README.markdown +++ b/client/ayon_core/hosts/resolve/README.markdown @@ -18,7 +18,7 @@ This is how it looks on my testing project timeline ![image](https://user-images.githubusercontent.com/40640033/102637638-96ec6600-4156-11eb-9656-6e8e3ce4baf8.png) Notice I had renamed tracks to `main` (holding metadata markers) and `review` used for generating review data with ffmpeg conversion to jpg sequence. -1. you need to start OpenPype menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__** +1. you need to start AYON menu from Resolve/EditTab/Menu/Workspace/Scripts/Comp/**__OpenPype_Menu__** 2. then select any clips in `main` track and change their color to `Chocolate` 3. in OpenPype Menu select `Create` 4.
in Creator select `Create Publishable Clip [New]` (temporary name) diff --git a/client/ayon_core/hosts/resolve/api/__init__.py b/client/ayon_core/hosts/resolve/api/__init__.py index dba275e6c4..3359430ef5 100644 --- a/client/ayon_core/hosts/resolve/api/__init__.py +++ b/client/ayon_core/hosts/resolve/api/__init__.py @@ -44,7 +44,7 @@ from .lib import ( get_reformated_path ) -from .menu import launch_pype_menu +from .menu import launch_ayon_menu from .plugin import ( ClipLoader, @@ -113,7 +113,7 @@ __all__ = [ "get_reformated_path", # menu - "launch_pype_menu", + "launch_ayon_menu", # plugin "ClipLoader", diff --git a/client/ayon_core/hosts/resolve/api/lib.py b/client/ayon_core/hosts/resolve/api/lib.py index 6e4e17811f..b9ad81c79d 100644 --- a/client/ayon_core/hosts/resolve/api/lib.py +++ b/client/ayon_core/hosts/resolve/api/lib.py @@ -330,19 +330,25 @@ def get_timeline_item(media_pool_item: object, Returns: object: resolve.TimelineItem """ - _clip_property = media_pool_item.GetClipProperty - clip_name = _clip_property("File Name") + clip_name = media_pool_item.GetClipProperty("File Name") output_timeline_item = None timeline = timeline or get_current_timeline() with maintain_current_timeline(timeline): # search the timeline for the added clip - for _ti_data in get_current_timeline_items(): - _ti_clip = _ti_data["clip"]["item"] - _ti_clip_property = _ti_clip.GetMediaPoolItem().GetClipProperty - if clip_name in _ti_clip_property("File Name"): - output_timeline_item = _ti_clip + for ti_data in get_current_timeline_items(): + ti_clip_item = ti_data["clip"]["item"] + ti_media_pool_item = ti_clip_item.GetMediaPoolItem() + + # Skip items that do not have a media pool item, like for example + # an "Adjustment Clip" or a "Fusion Composition" from the effects + # toolbox + if not ti_media_pool_item: + continue + + if clip_name in ti_media_pool_item.GetClipProperty("File Name"): + output_timeline_item = ti_clip_item return output_timeline_item @@ -713,6 +719,11 @@ def swap_clips(from_clip, to_clip, to_in_frame, to_out_frame): bool: True if successfully replaced """ + # copy ACES input transform from timeline clip to new media item + mediapool_item_from_timeline = from_clip.GetMediaPoolItem() + _idt = mediapool_item_from_timeline.GetClipProperty('IDT') + to_clip.SetClipProperty('IDT', _idt) + _clip_prop = to_clip.GetClipProperty to_clip_name = _clip_prop("File Name") # add clip item as take to timeline @@ -914,7 +925,7 @@ def get_reformated_path(path, padded=False, first=False): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/resolve/api/menu.py b/client/ayon_core/hosts/resolve/api/menu.py index 59eba14d83..fc2c15ad6d 100644 --- a/client/ayon_core/hosts/resolve/api/menu.py +++ b/client/ayon_core/hosts/resolve/api/menu.py @@ -38,9 +38,9 @@ class Spacer(QtWidgets.QWidget): self.setLayout(layout) -class OpenPypeMenu(QtWidgets.QWidget): +class AYONMenu(QtWidgets.QWidget): def __init__(self, *args, **kwargs): - super(OpenPypeMenu, self).__init__(*args, **kwargs) + super(AYONMenu, self).__init__(*args, **kwargs) self.setObjectName(f"{MENU_LABEL}Menu") @@ -48,6 +48,7 @@ class OpenPypeMenu(QtWidgets.QWidget): QtCore.Qt.Window | QtCore.Qt.CustomizeWindowHint | QtCore.Qt.WindowTitleHint + | QtCore.Qt.WindowMinimizeButtonHint | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowStaysOnTopHint ) @@ -170,14 +171,14 @@ class 
OpenPypeMenu(QtWidgets.QWidget): host_tools.show_experimental_tools_dialog() -def launch_pype_menu(): +def launch_ayon_menu(): app = QtWidgets.QApplication(sys.argv) - pype_menu = OpenPypeMenu() + ayon_menu = AYONMenu() stylesheet = load_stylesheet() - pype_menu.setStyleSheet(stylesheet) + ayon_menu.setStyleSheet(stylesheet) - pype_menu.show() + ayon_menu.show() sys.exit(app.exec_()) diff --git a/client/ayon_core/hosts/resolve/api/menu_style.qss b/client/ayon_core/hosts/resolve/api/menu_style.qss index 3d51c7139f..ad8932d881 100644 --- a/client/ayon_core/hosts/resolve/api/menu_style.qss +++ b/client/ayon_core/hosts/resolve/api/menu_style.qss @@ -51,7 +51,7 @@ QLineEdit { qproperty-alignment: AlignCenter; } -#OpenPypeMenu { +#AYONMenu { qproperty-alignment: AlignLeft; min-width: 10em; border: 1px solid #fef9ef; diff --git a/client/ayon_core/hosts/resolve/api/pipeline.py b/client/ayon_core/hosts/resolve/api/pipeline.py index 19d33971dc..15e4f1203d 100644 --- a/client/ayon_core/hosts/resolve/api/pipeline.py +++ b/client/ayon_core/hosts/resolve/api/pipeline.py @@ -123,7 +123,7 @@ def containerise(timeline_item, "name": str(name), "namespace": str(namespace), "loader": str(loader), - "representation": str(context["representation"]["_id"]), + "representation": context["representation"]["id"], }) if data: diff --git a/client/ayon_core/hosts/resolve/api/plugin.py b/client/ayon_core/hosts/resolve/api/plugin.py index 0c63dead32..0b339cdf7c 100644 --- a/client/ayon_core/hosts/resolve/api/plugin.py +++ b/client/ayon_core/hosts/resolve/api/plugin.py @@ -325,7 +325,7 @@ class ClipLoader: "or call your supervisor") # inject asset data to representation dict - self._get_asset_data() + self._get_folder_attributes() # add active components to class if self.new_timeline: @@ -355,40 +355,41 @@ class ClipLoader: } """ # create name - representation = self.context["representation"] - representation_context = representation["context"] - asset = str(representation_context["asset"]) - product_name = str(representation_context["subset"]) - representation_name = str(representation_context["representation"]) + folder_entity = self.context["folder"] + product_name = self.context["product"]["name"] + repre_entity = self.context["representation"] + + folder_name = folder_entity["name"] + folder_path = folder_entity["path"] + representation_name = repre_entity["name"] + self.data["clip_name"] = "_".join([ - asset, + folder_name, product_name, representation_name ]) - self.data["versionData"] = self.context["version"]["data"] + self.data["versionAttributes"] = self.context["version"]["attrib"] self.data["timeline_basename"] = "timeline_{}_{}".format( product_name, representation_name) # solve project bin structure path - hierarchy = str("/".join(( - "Loader", - representation_context["hierarchy"].replace("\\", "/"), - asset - ))) + hierarchy = "Loader{}".format(folder_path) self.data["binPath"] = hierarchy return True - def _get_asset_data(self): + def _get_folder_attributes(self): """ Get all available asset data joint `data` key with asset.data dict into the representation """ - self.data["assetData"] = copy.deepcopy(self.context["asset"]["data"]) + self.data["folderAttributes"] = copy.deepcopy( + self.context["folder"]["attrib"] + ) def load(self, files): """Load clip into timeline @@ -410,17 +411,20 @@ class ClipLoader: source_out = int(_clip_property("End")) source_duration = int(_clip_property("Frames")) + # Trim clip start if slate is present + if "slate" in self.data["versionAttributes"]["families"]: + source_in 
+= 1 + source_duration = source_out - source_in + 1 + if not self.with_handles: # Load file without the handles of the source media # We remove the handles from the source in and source out # so that the handles are excluded in the timeline - handle_start = 0 - handle_end = 0 # get version data frame data from db - version_data = self.data["versionData"] - frame_start = version_data.get("frameStart") - frame_end = version_data.get("frameEnd") + version_attributes = self.data["versionAttributes"] + frame_start = version_attributes.get("frameStart") + frame_end = version_attributes.get("frameEnd") # The version data usually stored the frame range + handles of the # media however certain representations may be shorter because they @@ -432,10 +436,10 @@ class ClipLoader: # from source and out if frame_start is not None and frame_end is not None: # Version has frame range data, so we can compare media length - handle_start = version_data.get("handleStart", 0) - handle_end = version_data.get("handleEnd", 0) + handle_start = version_attributes.get("handleStart", 0) + handle_end = version_attributes.get("handleEnd", 0) frame_start_handle = frame_start - handle_start - frame_end_handle = frame_start + handle_end + frame_end_handle = frame_end + handle_end database_frame_duration = int( frame_end_handle - frame_start_handle + 1 ) @@ -451,7 +455,7 @@ class ClipLoader: else: # set timeline start frame + original clip in frame timeline_in = int( - timeline_start + self.data["assetData"]["clipIn"]) + timeline_start + self.data["folderAttributes"]["clipIn"]) # make track item from source in bin as item timeline_item = lib.create_timeline_item( @@ -477,14 +481,16 @@ class ClipLoader: ) _clip_property = media_pool_item.GetClipProperty - source_in = int(_clip_property("Start")) - source_out = int(_clip_property("End")) + # Read trimming from timeline item + timeline_item_in = timeline_item.GetLeftOffset() + timeline_item_len = timeline_item.GetDuration() + timeline_item_out = timeline_item_in + timeline_item_len lib.swap_clips( timeline_item, media_pool_item, - source_in, - source_out + timeline_item_in, + timeline_item_out ) print("Loading clips: `{}`".format(self.data["clip_name"])) @@ -538,7 +544,7 @@ class TimelineItemLoader(LoaderPlugin): ): pass - def update(self, container, representation): + def update(self, container, context): """Update an existing `container` """ pass @@ -869,14 +875,14 @@ class PublishClip: def _convert_to_entity(self, key): """ Converting input key to key with type. 
""" # convert to entity type - entity_type = self.types.get(key) + folder_type = self.types.get(key) - assert entity_type, "Missing entity type for `{}`".format( + assert folder_type, "Missing folder type for `{}`".format( key ) return { - "entity_type": entity_type, + "folder_type": folder_type, "entity_name": self.hierarchy_data[key]["value"].format( **self.timeline_item_default_data ) diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py index d82651289c..cf9953bfe9 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_last_workfile.py @@ -1,5 +1,5 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes class PreLaunchResolveLastWorkfile(PreLaunchHook): diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py index c14fd75b2f..f45e28d5ab 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_setup.py @@ -1,7 +1,7 @@ import os from pathlib import Path import platform -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes from ayon_core.hosts.resolve.utils import setup diff --git a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py index ab16053450..300564f7cc 100644 --- a/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py +++ b/client/ayon_core/hosts/resolve/hooks/pre_resolve_startup.py @@ -1,6 +1,6 @@ import os -from ayon_core.lib.applications import PreLaunchHook, LaunchTypes +from ayon_applications import PreLaunchHook, LaunchTypes import ayon_core.hosts.resolve diff --git a/client/ayon_core/hosts/resolve/otio/utils.py b/client/ayon_core/hosts/resolve/otio/utils.py index 7d8089e055..c03305ff23 100644 --- a/client/ayon_core/hosts/resolve/otio/utils.py +++ b/client/ayon_core/hosts/resolve/otio/utils.py @@ -25,7 +25,7 @@ def get_reformated_path(path, padded=True, first=False): path (str): path url or simple file name Returns: - type: string with reformated path + type: string with reformatted path Example: get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr diff --git a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py index 3a2a0345ea..cbc03da3b6 100644 --- a/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/create/create_shot_clip.py @@ -166,7 +166,7 @@ class CreateShotClip(plugin.Creator): "type": "QCheckBox", "label": "Source resolution", "target": "tag", - "toolTip": "Is resloution taken from timeline or source?", # noqa + "toolTip": "Is resolution taken from timeline or source?", # noqa "order": 4}, } }, @@ -207,7 +207,7 @@ class CreateShotClip(plugin.Creator): presets = None def process(self): - # get key pares from presets and match it on ui inputs + # get key pairs from presets and match it on ui inputs for k, v in self.gui_inputs.items(): if v["type"] in ("dict", "section"): # nested dictionary (only one level allowed diff --git a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py index 4d1f8f1f7c..2ce1c43957 100644 --- 
a/client/ayon_core/hosts/resolve/plugins/load/load_clip.py +++ b/client/ayon_core/hosts/resolve/plugins/load/load_clip.py @@ -1,8 +1,5 @@ -from ayon_core.client import get_last_version_by_subset_id -from ayon_core.pipeline import ( - get_representation_context, - get_current_project_name -) +import ayon_api + from ayon_core.hosts.resolve.api import lib, plugin from ayon_core.hosts.resolve.api.pipeline import ( containerise, @@ -21,9 +18,9 @@ class LoadClip(plugin.TimelineItemLoader): during conforming to project """ - families = ["render2d", "source", "plate", "render", "review"] + product_types = {"render2d", "source", "plate", "render", "review"} - representations = ["*"] + representations = {"*"} extensions = set( ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) ) @@ -50,7 +47,11 @@ class LoadClip(plugin.TimelineItemLoader): namespace = namespace or timeline_item.GetName() # update color of clip regarding the version order - self.set_item_color(timeline_item, version=context["version"]) + self.set_item_color( + context["project"]["name"], + timeline_item, + context["version"] + ) data_imprint = self.get_tag_data(context, name, namespace) return containerise( @@ -59,27 +60,31 @@ class LoadClip(plugin.TimelineItemLoader): self.__class__.__name__, data_imprint) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): """ Updating previously loaded clips """ - context = get_representation_context(representation) + repre_entity = context["representation"] name = container['name'] namespace = container['namespace'] timeline_item = container["_timeline_item"] media_pool_item = timeline_item.GetMediaPoolItem() - files = plugin.get_representation_files(representation) + files = plugin.get_representation_files(repre_entity) loader = plugin.ClipLoader(self, context) timeline_item = loader.update(timeline_item, files) # update color of clip regarding the version order - self.set_item_color(timeline_item, version=context["version"]) + self.set_item_color( + context["project"]["name"], + timeline_item, + context["version"] + ) # if original media pool item has no remaining usages left # remove it from the media pool @@ -92,11 +97,10 @@ class LoadClip(plugin.TimelineItemLoader): def get_tag_data(self, context, name, namespace): """Return data to be imprinted on the timeline item marker""" - representation = context["representation"] - version = context['version'] - version_data = version.get("data", {}) - version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + repre_entity = context["representation"] + version_entity = context["version"] + version_attributes = version_entity["attrib"] + colorspace = version_attributes.get("colorSpace", None) object_name = "{}_{}".format(name, namespace) # add additional metadata from the version to imprint Avalon knob @@ -106,37 +110,34 @@ class LoadClip(plugin.TimelineItemLoader): "fps", "handleStart", "handleEnd" ] data = { - key: version_data.get(key, "None") for key in add_version_data_keys + key: version_attributes.get(key, "None") + for key in add_version_data_keys } # add variables related to version context data.update({ - "representation": str(representation["_id"]), - "version": version_name, + "representation": repre_entity["id"], + "version": version_entity["version"], "colorspace": colorspace, "objectName": 
object_name }) return data @classmethod - def set_item_color(cls, timeline_item, version): + def set_item_color(cls, project_name, timeline_item, version_entity): """Color timeline item based on whether it is outdated or latest""" - # define version name - version_name = version.get("name", None) # get all versions in list - project_name = get_current_project_name() - last_version_doc = get_last_version_by_subset_id( + last_version_entity = ayon_api.get_last_version_by_product_id( project_name, - version["parent"], + version_entity["productId"], - fields=["name"] + fields={"id"} ) - if last_version_doc: - last_version = last_version_doc["name"] - else: - last_version = None + last_version_id = None + if last_version_entity: + last_version_id = last_version_entity["id"] # set clip colour - if version_name == last_version: + if version_entity["id"] == last_version_id: timeline_item.SetClipColor(cls.clip_color_last) else: timeline_item.SetClipColor(cls.clip_color) diff --git a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py index b1374859e3..caa79c85c0 100644 --- a/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py +++ b/client/ayon_core/hosts/resolve/plugins/publish/precollect_instances.py @@ -63,7 +63,12 @@ class PrecollectInstances(pyblish.api.ContextPlugin): if k not in ("id", "applieswhole", "label") }) - asset = tag_data["folder_path"] + folder_path = tag_data["folder_path"] + # Backward compatibility fix of 'entity_type' > 'folder_type' + if "parents" in data: + for parent in data["parents"]: + if "entity_type" in parent: + parent["folder_type"] = parent.pop("entity_type") # TODO: remove backward compatibility product_name = tag_data.get("productName") @@ -74,7 +79,7 @@ # backward compatibility: product_name should not be missing if not product_name: self.log.error( - "Product name is not defined for: {}".format(asset)) + "Product name is not defined for: {}".format(folder_path)) # TODO: remove backward compatibility product_type = tag_data.get("productType") @@ -85,12 +90,12 @@ # backward compatibility: product_type should not be missing if not product_type: self.log.error( - "Product type is not defined for: {}".format(asset)) + "Product type is not defined for: {}".format(folder_path)) data.update({ - "name": "{}_{}".format(asset, product_name), - "label": "{} {}".format(asset, product_name), - "folderPath": asset, + "name": "{}_{}".format(folder_path, product_name), + "label": "{} {}".format(folder_path, product_name), + "folderPath": folder_path, "item": timeline_item, "publish": get_publish_attribute(timeline_item), "fps": context.data["fps"], @@ -151,16 +156,16 @@ if not hierarchy_data: return - asset = data["folderPath"] + folder_path = data["folderPath"] product_name = "shotMain" # insert family into families product_type = "shot" data.update({ - "name": "{}_{}".format(asset, product_name), - "label": "{} {}".format(asset, product_name), - "folderPath": asset, + "name": "{}_{}".format(folder_path, product_name), + "label": "{} {}".format(folder_path, product_name), + "folderPath": folder_path, "productName": product_name, "productType": product_type, "family": product_type, diff --git a/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py
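The reworked `set_item_color` above only compares version ids, so the latest-version check boils down to one `ayon_api` call. A standalone sketch of the same logic (the helper name is illustrative, not part of the diff):

```python
import ayon_api

def is_latest_version(project_name, version_entity):
    """Return True if 'version_entity' is its product's last version."""
    last_version = ayon_api.get_last_version_by_product_id(
        project_name,
        version_entity["productId"],
        fields={"id"},
    )
    return bool(last_version) and last_version["id"] == version_entity["id"]
```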
b/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py index a147c9a905..6158cf1d61 100644 --- a/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py +++ b/client/ayon_core/hosts/resolve/plugins/publish/precollect_workfile.py @@ -1,7 +1,7 @@ import pyblish.api from pprint import pformat -from ayon_core.pipeline import get_current_asset_name +from ayon_core.pipeline import get_current_folder_path from ayon_core.hosts.resolve import api as rapi from ayon_core.hosts.resolve.otio import davinci_export @@ -14,8 +14,8 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): order = pyblish.api.CollectorOrder - 0.5 def process(self, context): - current_asset_name = get_current_asset_name() - asset_name = current_asset_name.split("/")[-1] + current_folder_path = get_current_folder_path() + folder_name = current_folder_path.split("/")[-1] product_name = "workfileMain" project = rapi.get_current_project() @@ -26,10 +26,10 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin): otio_timeline = davinci_export.create_otio_timeline(project) instance_data = { - "name": "{}_{}".format(asset_name, product_name), - "label": "{} {}".format(current_asset_name, product_name), + "name": "{}_{}".format(folder_name, product_name), + "label": "{} {}".format(current_folder_path, product_name), "item": project, - "folderPath": current_asset_name, + "folderPath": current_folder_path, "productName": product_name, "productType": "workfile", "family": "workfile", diff --git a/client/ayon_core/hosts/resolve/startup.py b/client/ayon_core/hosts/resolve/startup.py index b3c1a024d9..3ad0a6bf7b 100644 --- a/client/ayon_core/hosts/resolve/startup.py +++ b/client/ayon_core/hosts/resolve/startup.py @@ -35,7 +35,7 @@ def ensure_installed_host(): def launch_menu(): print("Launching Resolve AYON menu..") ensure_installed_host() - ayon_core.hosts.resolve.api.launch_pype_menu() + ayon_core.hosts.resolve.api.launch_ayon_menu() def open_workfile(path): diff --git a/client/ayon_core/hosts/resolve/utility_scripts/AYON__Menu.py b/client/ayon_core/hosts/resolve/utility_scripts/AYON__Menu.py index 08cefb9d61..b10b477beb 100644 --- a/client/ayon_core/hosts/resolve/utility_scripts/AYON__Menu.py +++ b/client/ayon_core/hosts/resolve/utility_scripts/AYON__Menu.py @@ -8,13 +8,13 @@ log = Logger.get_logger(__name__) def main(env): - from ayon_core.hosts.resolve.api import ResolveHost, launch_pype_menu + from ayon_core.hosts.resolve.api import ResolveHost, launch_ayon_menu # activate resolve from openpype host = ResolveHost() install_host(host) - launch_pype_menu() + launch_ayon_menu() if __name__ == "__main__": diff --git a/client/ayon_core/hosts/substancepainter/api/lib.py b/client/ayon_core/hosts/substancepainter/api/lib.py index 1cb480b552..64c39943ce 100644 --- a/client/ayon_core/hosts/substancepainter/api/lib.py +++ b/client/ayon_core/hosts/substancepainter/api/lib.py @@ -586,7 +586,6 @@ def prompt_new_file_with_mesh(mesh_filepath): # TODO: find a way to improve the process event to # load more complicated mesh app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 3000) - file_dialog.done(file_dialog.Accepted) app.processEvents(QtCore.QEventLoop.AllEvents) @@ -606,7 +605,7 @@ def prompt_new_file_with_mesh(mesh_filepath): mesh_select.setVisible(False) # Ensure UI is visually up-to-date - app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents) + app.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents, 8000) # Trigger the 'select file' dialog to set the path and have the # new file dialog to use the 
path. @@ -623,8 +622,6 @@ "Failed to set mesh path with the prompt dialog:" f"{mesh_filepath}\n\n" "Creating new project directly with the mesh path instead.") - else: - dialog.done(dialog.Accepted) new_action = _get_new_project_action() if not new_action: diff --git a/client/ayon_core/hosts/substancepainter/api/pipeline.py b/client/ayon_core/hosts/substancepainter/api/pipeline.py index 843c120d8e..23d629533c 100644 --- a/client/ayon_core/hosts/substancepainter/api/pipeline.py +++ b/client/ayon_core/hosts/substancepainter/api/pipeline.py @@ -12,17 +12,14 @@ import substance_painter.project import pyblish.api from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost -from ayon_core.settings import ( - get_current_project_settings, - get_project_settings, -) +from ayon_core.settings import get_current_project_settings from ayon_core.pipeline.template_data import get_template_data_with_names from ayon_core.pipeline import ( register_creator_plugin_path, register_loader_plugin_path, AVALON_CONTAINER_ID, - Anatomy + Anatomy, ) from ayon_core.lib import ( StringTemplate, @@ -245,16 +242,15 @@ class SubstanceHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return # Prepare formatting data if we detect any path which might have - # template tokens like {asset} in there. + # template tokens like {folder[name]} in there. formatting_data = {} has_formatting_entries = any("{" in item["value"] for item in shelves) if has_formatting_entries: project_name = self.get_current_project_name() - asset_name = self.get_current_asset_name() - task_name = self.get_current_asset_name() - project_settings = get_project_settings(project_name) + folder_path = self.get_current_folder_path() + task_name = self.get_current_task_name() formatting_data = get_template_data_with_names( - project_name, asset_name, task_name, project_settings + project_name, folder_path, task_name ) anatomy = Anatomy(project_name) formatting_data["root"] = anatomy.roots @@ -338,7 +334,7 @@ def imprint_container(container, ("name", str(name)), ("namespace", str(namespace) if namespace else None), ("loader", str(loader.__class__.__name__)), - ("representation", str(context["representation"]["_id"])), + ("representation", context["representation"]["id"]), ] for key, value in data: container[key] = value diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py index f204ff7728..f46afadb5a 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_textures.py @@ -144,7 +144,8 @@ class CreateTextures(Creator): 9: "512", 10: "1024", 11: "2048", - 12: "4096" + 12: "4096", + 13: "8192" }, default=None, label="Size"), diff --git a/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py b/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py index 23811dfd29..63b1c6c7da 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/substancepainter/plugins/create/create_workfile.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" +import ayon_api + from ayon_core.pipeline import CreatedInstance, AutoCreator -from ayon_core.client import get_asset_by_name from ayon_core.hosts.substancepainter.api.pipeline import ( set_instances, @@ -29,7 +30,7
@@ class CreateWorkfile(AutoCreator): variant = self.default_variant project_name = self.project_name - asset_name = self.create_context.get_current_asset_name() + folder_path = self.create_context.get_current_folder_path() task_name = self.create_context.get_current_task_name() host_name = self.create_context.host_name @@ -41,42 +42,51 @@ class CreateWorkfile(AutoCreator): if instance.creator_identifier == self.identifier ), None) - if current_instance is None: - current_instance_asset = None - else: - current_instance_asset = current_instance["folderPath"] + current_folder_path = None + if current_instance is not None: + current_folder_path = current_instance["folderPath"] if current_instance is None: self.log.info("Auto-creating workfile instance...") - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": variant } current_instance = self.create_instance_in_context(product_name, data) elif ( - current_instance_asset != asset_name + current_folder_path != folder_path or current_instance["task"] != task_name ): # Update instance context if is not the same - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, ) - current_instance["folderPath"] = asset_name + current_instance["folderPath"] = folder_path current_instance["task"] = task_name current_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py index 48aa99d357..d5aac1191c 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py +++ b/client/ayon_core/hosts/substancepainter/plugins/load/load_mesh.py @@ -1,3 +1,5 @@ +import copy +from qtpy import QtWidgets, QtCore from ayon_core.pipeline import ( load, get_representation_path, @@ -8,65 +10,175 @@ from ayon_core.hosts.substancepainter.api.pipeline import ( set_container_metadata, remove_container_metadata ) -from ayon_core.hosts.substancepainter.api.lib import prompt_new_file_with_mesh import substance_painter.project -import qargparse + + +def _convert(substance_attr): + """Return Substance Painter Python API Project attribute from string. + + This converts a string like "ProjectWorkflow.Default" to for example + the Substance Painter Python API equivalent object, like: + `substance_painter.project.ProjectWorkflow.Default` + + Args: + substance_attr (str): The `substance_painter.project` attribute, + for example "ProjectWorkflow.Default" + + Returns: + Any: Substance Python API object of the project attribute. + + Raises: + ValueError: If attribute does not exist on the + `substance_painter.project` python api. 
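For illustration, `_convert` just walks dotted attribute names on the `substance_painter.project` module, so a settings string resolves to the API object like this (assuming Substance Painter's Python API is importable and `_convert` from the module below is in scope):

```python
import substance_painter.project

# "ProjectWorkflow.Default" resolves attribute by attribute:
workflow = _convert("ProjectWorkflow.Default")
assert workflow is substance_painter.project.ProjectWorkflow.Default

# A typo in settings fails loudly instead of silently returning None:
# _convert("ProjectWorkflow.Defualt")  # raises ValueError
```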
+ """ + root = substance_painter.project + for attr in substance_attr.split("."): + root = getattr(root, attr, None) + if root is None: + raise ValueError( + "Substance Painter project attribute" + f" does not exist: {substance_attr}") + + return root + + +def get_template_by_name(name: str, templates: list[dict]) -> dict: + return next( + template for template in templates + if template["name"] == name + ) + + +class SubstanceProjectConfigurationWindow(QtWidgets.QDialog): + """The pop-up dialog allows users to choose material + duplicate options for importing Max objects when updating + or switching assets. + """ + def __init__(self, project_templates): + super(SubstanceProjectConfigurationWindow, self).__init__() + self.setWindowFlags(self.windowFlags() | QtCore.Qt.FramelessWindowHint) + + self.configuration = None + self.template_names = [template["name"] for template + in project_templates] + self.project_templates = project_templates + + self.widgets = { + "label": QtWidgets.QLabel( + "Select your template for project configuration"), + "template_options": QtWidgets.QComboBox(), + "import_cameras": QtWidgets.QCheckBox("Import Cameras"), + "preserve_strokes": QtWidgets.QCheckBox("Preserve Strokes"), + "clickbox": QtWidgets.QWidget(), + "combobox": QtWidgets.QWidget(), + "buttons": QtWidgets.QDialogButtonBox( + QtWidgets.QDialogButtonBox.Ok + | QtWidgets.QDialogButtonBox.Cancel) + } + + self.widgets["template_options"].addItems(self.template_names) + + template_name = self.widgets["template_options"].currentText() + self._update_to_match_template(template_name) + # Build clickboxes + layout = QtWidgets.QHBoxLayout(self.widgets["clickbox"]) + layout.addWidget(self.widgets["import_cameras"]) + layout.addWidget(self.widgets["preserve_strokes"]) + # Build combobox + layout = QtWidgets.QHBoxLayout(self.widgets["combobox"]) + layout.addWidget(self.widgets["template_options"]) + # Build buttons + layout = QtWidgets.QHBoxLayout(self.widgets["buttons"]) + # Build layout. 
+ layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(self.widgets["label"]) + layout.addWidget(self.widgets["combobox"]) + layout.addWidget(self.widgets["clickbox"]) + layout.addWidget(self.widgets["buttons"]) + + self.widgets["template_options"].currentTextChanged.connect( + self._update_to_match_template) + self.widgets["buttons"].accepted.connect(self.on_accept) + self.widgets["buttons"].rejected.connect(self.on_reject) + + def on_accept(self): + self.configuration = self.get_project_configuration() + self.close() + + def on_reject(self): + self.close() + + def _update_to_match_template(self, template_name): + template = get_template_by_name(template_name, self.project_templates) + self.widgets["import_cameras"].setChecked(template["import_cameras"]) + self.widgets["preserve_strokes"].setChecked( + template["preserve_strokes"]) + + def get_project_configuration(self): + templates = self.project_templates + template_name = self.widgets["template_options"].currentText() + template = get_template_by_name(template_name, templates) + template = copy.deepcopy(template) # do not edit the original + template["import_cameras"] = self.widgets["import_cameras"].isChecked() + template["preserve_strokes"] = ( + self.widgets["preserve_strokes"].isChecked() + ) + for key in ["normal_map_format", + "project_workflow", + "tangent_space_mode"]: + template[key] = _convert(template[key]) + return template + + @classmethod + def prompt(cls, templates): + dialog = cls(templates) + dialog.exec_() + configuration = dialog.configuration + dialog.deleteLater() + return configuration class SubstanceLoadProjectMesh(load.LoaderPlugin): """Load mesh for project""" - families = ["*"] - representations = ["abc", "fbx", "obj", "gltf"] + product_types = {"*"} + representations = {"abc", "fbx", "obj", "gltf", "usd", "usda", "usdc"} label = "Load mesh" order = -10 icon = "code-fork" color = "orange" - options = [ - qargparse.Boolean( - "preserve_strokes", - default=True, - help="Preserve strokes positions on mesh.\n" - "(only relevant when loading into existing project)" - ), - qargparse.Boolean( - "import_cameras", - default=True, - help="Import cameras from the mesh file." - ) - ] + # Defined via settings + project_templates = [] - def load(self, context, name, namespace, data): + def load(self, context, name, namespace, options=None): # Get user inputs - import_cameras = data.get("import_cameras", True) - preserve_strokes = data.get("preserve_strokes", True) - sp_settings = substance_painter.project.Settings( - import_cameras=import_cameras - ) + result = SubstanceProjectConfigurationWindow.prompt( + self.project_templates) + if not result: + # cancelling loader action + return if not substance_painter.project.is_open(): # Allow to 'initialize' a new project path = self.filepath_from_context(context) - # TODO: improve the prompt dialog function to not - # only works for simple polygon scene - result = prompt_new_file_with_mesh(mesh_filepath=path) - if not result: - self.log.info("User cancelled new project prompt." 
- "Creating new project directly from" - " Substance Painter API Instead.") - settings = substance_painter.project.create( - mesh_file_path=path, settings=sp_settings - ) - + sp_settings = substance_painter.project.Settings( + import_cameras=result["import_cameras"], + normal_map_format=result["normal_map_format"], + project_workflow=result["project_workflow"], + tangent_space_mode=result["tangent_space_mode"], + default_texture_resolution=result["default_texture_resolution"] + ) + settings = substance_painter.project.create( + mesh_file_path=path, settings=sp_settings + ) else: # Reload the mesh settings = substance_painter.project.MeshReloadingSettings( - import_cameras=import_cameras, - preserve_strokes=preserve_strokes - ) + import_cameras=result["import_cameras"], + preserve_strokes=result["preserve_strokes"]) def on_mesh_reload(status: substance_painter.project.ReloadMeshStatus): # noqa if status == substance_painter.project.ReloadMeshStatus.SUCCESS: # noqa @@ -92,17 +204,18 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # from the user's original choice. We don't store 'preserve_strokes' # as we always preserve strokes on updates. container["options"] = { - "import_cameras": import_cameras, + "import_cameras": result["import_cameras"], } set_container_metadata(project_mesh_object_name, container) - def switch(self, container, representation): - self.update(container, representation) + def switch(self, container, context): + self.update(container, context) - def update(self, container, representation): + def update(self, container, context): + repre_entity = context["representation"] - path = get_representation_path(representation) + path = get_representation_path(repre_entity) # Reload the mesh container_options = container.get("options", {}) @@ -121,7 +234,7 @@ class SubstanceLoadProjectMesh(load.LoaderPlugin): # Update container representation object_name = container["objectName"] - update_data = {"representation": str(representation["_id"])} + update_data = {"representation": repre_entity["id"]} set_container_metadata(object_name, update_data, update=True) def remove(self, container): diff --git a/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py b/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py index 9cd77e8f90..20aaa56993 100644 --- a/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py +++ b/client/ayon_core/hosts/substancepainter/plugins/publish/collect_textureset_images.py @@ -1,16 +1,16 @@ import os import copy -import pyblish.api -from ayon_core.pipeline import publish +import pyblish.api +import ayon_api import substance_painter.textureset +from ayon_core.pipeline import publish from ayon_core.hosts.substancepainter.api.lib import ( get_parsed_export_maps, strip_template ) from ayon_core.pipeline.create import get_product_name -from ayon_core.client import get_asset_by_name class CollectTextureSet(pyblish.api.InstancePlugin): @@ -26,10 +26,17 @@ class CollectTextureSet(pyblish.api.InstancePlugin): def process(self, instance): config = self.get_export_config(instance) - asset_doc = get_asset_by_name( - instance.context.data["projectName"], + project_name = instance.context.data["projectName"] + folder_entity = ayon_api.get_folder_by_path( + project_name, instance.data["folderPath"] ) + task_name = instance.data.get("task") + task_entity = None + if folder_entity and task_name: + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], 
task_name + ) instance.data["exportConfig"] = config maps = get_parsed_export_maps(config) @@ -41,12 +48,12 @@ for template, outputs in template_maps.items(): self.log.info(f"Processing {template}") self.create_image_instance(instance, template, outputs, - asset_doc=asset_doc, + task_entity=task_entity, texture_set_name=texture_set_name, stack_name=stack_name) def create_image_instance(self, instance, template, outputs, - asset_doc, texture_set_name, stack_name): + task_entity, texture_set_name, stack_name): """Create a new instance per image or UDIM sequence. The new instances will be of product type `image`. @@ -79,14 +86,19 @@ map_identifier = strip_template(template) suffix += f".{map_identifier}" + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + image_product_name = get_product_name( # TODO: The product type actually isn't 'texture' currently but # for now this is only done so the product name starts with # 'texture' - project_name=context.data["projectName"], - asset_doc=asset_doc, - task_name=instance.data.get("task"), - host_name=context.data["hostName"], + context.data["projectName"], + task_name, + task_type, + context.data["hostName"], product_type="texture", variant=instance.data["variant"] + suffix, project_settings=context.data["project_settings"] @@ -126,7 +138,7 @@ image_instance.data["representations"] = [representation] # Group the textures together in the loader - image_instance.data["subsetGroup"] = image_product_name + image_instance.data["productGroup"] = image_product_name # Store the texture set name and stack name on the instance image_instance.data["textureSetName"] = texture_set_name diff --git a/client/ayon_core/hosts/traypublisher/addon.py b/client/ayon_core/hosts/traypublisher/addon.py index 70bdfe9a64..3dd275f223 100644 --- a/client/ayon_core/hosts/traypublisher/addon.py +++ b/client/ayon_core/hosts/traypublisher/addon.py @@ -1,5 +1,6 @@ import os +from pathlib import Path from ayon_core.lib import get_ayon_launcher_args from ayon_core.lib.execute import run_detached_process from ayon_core.addon import ( @@ -57,3 +58,62 @@ def launch(): from ayon_core.tools import traypublisher traypublisher.main() + + +@cli_main.command() +@click_wrap.option( + "--filepath", + help="Full path to CSV file with data", + type=str, + required=True +) +@click_wrap.option( + "--project", + help="Project name in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--folder-path", + help="Folder path in which the context will be used", + type=str, + required=True +) +@click_wrap.option( + "--task", + help="Task name under the folder in which the context will be used", + type=str, + required=False +) +@click_wrap.option( + "--ignore-validators", + help="Option to ignore validators", + type=bool, + is_flag=True, + required=False +) +def ingestcsv( + filepath, + project, + folder_path, + task, + ignore_validators +): + """Ingest CSV file into project. + + This command ingests a CSV file into the project. The CSV file must + follow a specific format. See documentation for more information.
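The new `ingestcsv` command is exposed through the addon CLI, so it can be spawned the same way the `launch` command above starts the Tray Publisher. A hypothetical sketch using the helpers already imported in this module (the `"addon"` subcommand path and all argument values are placeholders, depending on how the launcher exposes addon CLIs):

```python
from ayon_core.lib import get_ayon_launcher_args
from ayon_core.lib.execute import run_detached_process

# Placeholders only; point these at a real project and CSV file.
args = get_ayon_launcher_args(
    "addon", "traypublisher", "ingestcsv",
    "--filepath", "/path/to/ingest.csv",
    "--project", "MyProject",
    "--folder-path", "/shots/sq01/sh010",
    "--task", "edit",
)
run_detached_process(args)
```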
+ """ + from .csv_publish import csvpublish + + # use Path to check if csv_filepath exists + if not Path(filepath).exists(): + raise FileNotFoundError(f"File {filepath} does not exist.") + + csvpublish( + filepath, + project, + folder_path, + task, + ignore_validators + ) diff --git a/client/ayon_core/hosts/traypublisher/api/editorial.py b/client/ayon_core/hosts/traypublisher/api/editorial.py index 6153bc5752..c71dae336c 100644 --- a/client/ayon_core/hosts/traypublisher/api/editorial.py +++ b/client/ayon_core/hosts/traypublisher/api/editorial.py @@ -1,7 +1,8 @@ import re from copy import deepcopy -from ayon_core.client import get_asset_by_id +import ayon_api + from ayon_core.pipeline.create import CreatorError @@ -88,7 +89,7 @@ class ShotMetadataSolver: if not self.clip_name_tokenizer: return output_data - parent_name = source_data["selected_asset_doc"]["name"] + parent_name = source_data["selected_folder_entity"]["name"] search_text = parent_name + clip_name @@ -157,11 +158,11 @@ class ShotMetadataSolver: **_parent_tokens_formatting_data) except KeyError as _error: raise CreatorError(( - "Make sure all keys in settings are correct : \n\n" - f"`{_error}` from template string " - f"{shot_hierarchy['parents_path']}, " - f" has no equivalent in \n" - f"{list(_parent_tokens_formatting_data.keys())} parents" + "Make sure all keys in settings are correct:\n\n" + f"`{_error}` from template string" + f" {shot_hierarchy['parents_path']}," + f" has no equivalent in" + f"\n{list(_parent_tokens_formatting_data.keys())} parents" )) parent_token_name = ( @@ -174,7 +175,8 @@ class ShotMetadataSolver: # find parent type parent_token_type = _parent_tokens_type[parent_token_name] - # in case selected context is set to the same asset + # in case selected context is set to the same folder + # TODO keep index with 'parents' - name check is not enough if ( _index == 0 and parents[-1]["entity_name"] == parent_name @@ -184,14 +186,15 @@ class ShotMetadataSolver: # in case first parent is project then start parents from start if ( _index == 0 - and parent_token_type == "Project" + and parent_token_type.lower() == "project" ): project_parent = parents[0] parents = [project_parent] continue parents.append({ - "entity_type": parent_token_type, + "entity_type": "folder", + "folder_type": parent_token_type.lower(), "entity_name": parent_name }) @@ -209,58 +212,70 @@ class ShotMetadataSolver: return "/".join( [ p["entity_name"] for p in parents - if p["entity_type"] != "Project" + if p["entity_type"] != "project" ] ) if parents else "" - def _get_parents_from_selected_asset( + def _get_parents_from_selected_folder( self, - asset_doc, - project_doc + project_entity, + folder_entity, ): - """Returning parents from context on selected asset. + """Returning parents from context on selected folder. Context defined in Traypublisher project tree. Args: - asset_doc (db obj): selected asset doc - project_doc (db obj): actual project doc + project_entity (dict[str, Any]): Project entity. + folder_entity (dict[str, Any]): Selected folder entity. 
Returns: - list: list of dict parent components + list: list of dict parent components """ - project_name = project_doc["name"] - visual_hierarchy = [asset_doc] - current_doc = asset_doc - # looping through all available visual parents - # if they are not available anymore than it breaks - while True: - visual_parent_id = current_doc["data"]["visualParent"] - visual_parent = None - if visual_parent_id: - visual_parent = get_asset_by_id(project_name, visual_parent_id) + project_name = project_entity["name"] + path_entries = folder_entity["path"].split("/") + subpaths = [] + subpath_items = [] + for name in path_entries: + subpath_items.append(name) + if name: + subpaths.append("/".join(subpath_items)) + # Remove last name because we already have folder entity + subpaths.pop(-1) - if not visual_parent: - visual_hierarchy.append(project_doc) - break - visual_hierarchy.append(visual_parent) - current_doc = visual_parent + folder_entity_by_path = {} + if subpaths: + folder_entity_by_path = { + parent_folder["path"]: parent_folder + for parent_folder in ayon_api.get_folders( + project_name, folder_paths=subpaths + ) + } + folders_hierarchy = [ + folder_entity_by_path[folder_path] + for folder_path in subpaths + ] + folders_hierarchy.append(folder_entity) # add current selection context hierarchy - return [ - { - "entity_type": entity["data"]["entityType"], + output = [{ + "entity_type": "project", + "entity_name": project_name, + }] + for entity in folders_hierarchy: + output.append({ + "entity_type": "folder", + "folder_type": entity["folderType"], "entity_name": entity["name"] - } - for entity in reversed(visual_hierarchy) - ] + }) + return output - def _generate_tasks_from_settings(self, project_doc): + def _generate_tasks_from_settings(self, project_entity): """Convert settings inputs to task data. Args: - project_doc (db obj): actual project doc + project_entity (dict): Project entity. 
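Concretely, the parent lookup above derives every ancestor path from the folder's own path and fetches them in a single `ayon_api.get_folders` call. A small worked example of the subpath construction (values are hypothetical):

```python
# Hypothetical folder path as stored on the folder entity.
path = "/shots/sq01/sh010"

subpaths, items = [], []
for name in path.split("/"):
    items.append(name)
    if name:
        subpaths.append("/".join(items))
subpaths.pop(-1)  # the folder itself is already known

print(subpaths)  # ['/shots', '/shots/sq01']
```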
Raises: KeyError: Missing task type in project doc @@ -270,19 +285,23 @@ """ tasks_to_add = {} - project_task_types = project_doc["config"]["tasks"] + project_task_types = project_entity["taskTypes"] + task_type_names = { + task_type["name"] + for task_type in project_task_types + } for task_item in self.shot_add_tasks: task_name = task_item["name"] task_type = task_item["task_type"] # check if task type in project task types - if task_type not in project_task_types.keys(): + if task_type not in task_type_names: raise KeyError( "Task type `{}` for task `{}` does not" " exist in `{}`".format( task_type, task_name, - list(project_task_types.keys()) + list(task_type_names) ) ) tasks_to_add[task_name] = {"type": task_type} @@ -303,8 +322,8 @@ """ tasks = {} - asset_doc = source_data["selected_asset_doc"] - project_doc = source_data["project_doc"] + folder_entity = source_data["selected_folder_entity"] + project_entity = source_data["project_entity"] # match clip to shot name at start shot_name = clip_name @@ -312,8 +331,10 @@ # parse all tokens and generate formatting data formatting_data = self._generate_tokens(shot_name, source_data) - # generate parents from selected asset - parents = self._get_parents_from_selected_asset(asset_doc, project_doc) + # generate parents from selected folder + parents = self._get_parents_from_selected_folder( + project_entity, folder_entity + ) if self.shot_rename["enabled"]: shot_name = self._rename_template(formatting_data) @@ -325,7 +346,7 @@ if self.shot_add_tasks: tasks = self._generate_tasks_from_settings( - project_doc) + project_entity) # generate hierarchy path from parents hierarchy_path = self._create_hierarchy_path(parents) diff --git a/client/ayon_core/hosts/traypublisher/api/plugin.py b/client/ayon_core/hosts/traypublisher/api/plugin.py index be50383510..257d01eb50 100644 --- a/client/ayon_core/hosts/traypublisher/api/plugin.py +++ b/client/ayon_core/hosts/traypublisher/api/plugin.py @@ -1,9 +1,5 @@ -from ayon_core.client import ( - get_assets, - get_subsets, - get_last_versions, - get_asset_name_identifier, -) +import ayon_api + from ayon_core.lib.attribute_definitions import ( FileDef, BoolDef, @@ -117,10 +113,12 @@ class SettingsCreator(TrayPublishCreator): # Fill 'version_to_use' if version control is enabled if self.allow_version_control: - asset_name = data["folderPath"] - subset_docs_by_asset_id = self._prepare_next_versions( - [asset_name], [product_name]) - version = subset_docs_by_asset_id[asset_name].get(product_name) + folder_path = data["folderPath"] + product_entities_by_folder_path = self._prepare_next_versions( + [folder_path], [product_name]) + version = product_entities_by_folder_path[folder_path].get( + product_name + ) pre_create_data["version_to_use"] = version data["_previous_last_version"] = version @@ -137,67 +135,69 @@ if thumbnail_path: self.set_instance_thumbnail_path(new_instance.id, thumbnail_path) - def _prepare_next_versions(self, asset_names, product_names): - """Prepare next versions for given asset and product names. + def _prepare_next_versions(self, folder_paths, product_names): - """Prepare next versions for given asset and product names. + """Prepare next versions for given folder and product names. Todos: - Expect combination of product names by asset name to avoid + Expect combination of product names by folder path to avoid unnecessary server calls for unused products. Args: - asset_names (Iterable[str]): Asset names.
- product_names (Iterable[str]): Subset names. + folder_paths (Iterable[str]): Folder paths. + product_names (Iterable[str]): Product names. Returns: - dict[str, dict[str, int]]: Last versions by asset + dict[str, dict[str, int]]: Next versions by folder path and product names. """ # Prepare all versions for all combinations to '1' # TODO use 'ayon_core.pipeline.version_start' logic - subset_docs_by_asset_id = { - asset_name: { + product_entities_by_folder_path = { + folder_path: { product_name: 1 for product_name in product_names } - for asset_name in asset_names + for folder_path in folder_paths } - if not asset_names or not product_names: - return subset_docs_by_asset_id + if not folder_paths or not product_names: + return product_entities_by_folder_path - asset_docs = get_assets( + folder_entities = ayon_api.get_folders( self.project_name, - asset_names=asset_names, - fields=["_id", "name", "data.parents"] + folder_paths=folder_paths, + fields={"id", "path"} ) - asset_names_by_id = { - asset_doc["_id"]: get_asset_name_identifier(asset_doc) - for asset_doc in asset_docs + folder_paths_by_id = { + folder_entity["id"]: folder_entity["path"] + for folder_entity in folder_entities } - subset_docs = list(get_subsets( + product_entities = list(ayon_api.get_products( self.project_name, - asset_ids=asset_names_by_id.keys(), - subset_names=product_names, - fields=["_id", "name", "parent"] + folder_ids=folder_paths_by_id.keys(), + product_names=product_names, + fields={"id", "name", "folderId"} )) - product_ids = {subset_doc["_id"] for subset_doc in subset_docs} - last_versions = get_last_versions( + product_ids = {p["id"] for p in product_entities} + last_versions = ayon_api.get_last_versions( self.project_name, product_ids, - fields=["name", "parent"]) + fields={"version", "productId"}) - for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - asset_name = asset_names_by_id[asset_id] - product_name = subset_doc["name"] - product_id = subset_doc["_id"] + for product_entity in product_entities: + product_id = product_entity["id"] + product_name = product_entity["name"] + folder_id = product_entity["folderId"] + folder_path = folder_paths_by_id[folder_id] last_version = last_versions.get(product_id) version = 0 if last_version is not None: - version = last_version["name"] - subset_docs_by_asset_id[asset_name][product_name] += version - return subset_docs_by_asset_id + version = last_version["version"] + product_entities_by_folder_path[folder_path][product_name] += ( + version + ) + return product_entities_by_folder_path def _fill_next_versions(self, instances_data): """Fill next version for instances.
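Because every folder/product pair is seeded with `1` and the last published version is added on top, the mapping that `_prepare_next_versions` returns is directly the next version to use. For example (hypothetical data):

```python
# Hypothetical outcome for one folder with two products, where
# "renderMain" already has v3 published and "plateMain" has nothing:
{
    "/shots/sq01/sh010": {
        "renderMain": 4,  # 1 + last version 3
        "plateMain": 1,   # no published versions yet
    },
}
```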
@@ -222,20 +222,20 @@ class SettingsCreator(TrayPublishCreator): ): filtered_instance_data.append(instance) - asset_names = { + folder_paths = { instance["folderPath"] for instance in filtered_instance_data } product_names = { instance["productName"] for instance in filtered_instance_data} - subset_docs_by_asset_id = self._prepare_next_versions( - asset_names, product_names + product_entities_by_folder_path = self._prepare_next_versions( + folder_paths, product_names ) for instance in filtered_instance_data: - asset_name = instance["folderPath"] + folder_path = instance["folderPath"] product_name = instance["productName"] - version = subset_docs_by_asset_id[asset_name][product_name] + version = product_entities_by_folder_path[folder_path][product_name] instance["creator_attributes"]["version_to_use"] = version instance["_previous_last_version"] = version diff --git a/client/ayon_core/hosts/traypublisher/batch_parsing.py b/client/ayon_core/hosts/traypublisher/batch_parsing.py index fdb3021a20..fa3c8d5b9a 100644 --- a/client/ayon_core/hosts/traypublisher/batch_parsing.py +++ b/client/ayon_core/hosts/traypublisher/batch_parsing.py @@ -2,13 +2,18 @@ import os import re +import ayon_api + from ayon_core.lib import Logger -from ayon_core.client import get_assets, get_asset_by_name -def get_asset_doc_from_file_name(source_filename, project_name, - version_regex, all_selected_asset_ids=None): - """Try to parse out asset name from file name provided. +def get_folder_entity_from_filename( + project_name, + source_filename, + version_regex, + all_selected_folder_ids=None +): + """Try to parse out folder name from file name provided. Artists might provide various file name formats. Currently handled: @@ -17,72 +22,101 @@ def get_asset_doc_from_file_name(source_filename, project_name, - my_chair_to_upload.mov """ version = None - asset_name = os.path.splitext(source_filename)[0] - # Always first check if source filename is directly asset (eg. 'chair.mov') - matching_asset_doc = get_asset_by_name_case_not_sensitive( - project_name, asset_name, all_selected_asset_ids) + folder_name = os.path.splitext(source_filename)[0] + # Always first check if source filename is directly folder + # (eg. 'chair.mov') + matching_folder_entity = get_folder_by_name_case_not_sensitive( + project_name, folder_name, all_selected_folder_ids) - if matching_asset_doc is None: + if matching_folder_entity is None: # name contains also a version - matching_asset_doc, version = ( - parse_with_version(project_name, asset_name, version_regex, - all_selected_asset_ids)) + matching_folder_entity, version = ( + parse_with_version( + project_name, + folder_name, + version_regex, + all_selected_folder_ids + ) + ) - if matching_asset_doc is None: - matching_asset_doc = parse_containing(project_name, asset_name, - all_selected_asset_ids) + if matching_folder_entity is None: + matching_folder_entity = parse_containing( + project_name, + folder_name, + all_selected_folder_ids + ) - return matching_asset_doc, version + return matching_folder_entity, version -def parse_with_version(project_name, asset_name, version_regex, - all_selected_asset_ids=None, log=None): - """Try to parse asset name from a file name containing version too +def parse_with_version( + project_name, + folder_name, + version_regex, + all_selected_folder_ids=None, + log=None +): + """Try to parse folder name from a file name containing version too Eg. 
'chair_v001.mov' >> 'chair', 1 """ if not log: log = Logger.get_logger(__name__) log.debug( - ("Asset doc by \"{}\" was not found, trying version regex.". - format(asset_name))) + ("Folder entity by \"{}\" was not found, trying version regex.". + format(folder_name))) - matching_asset_doc = version_number = None + matching_folder_entity = version_number = None - regex_result = version_regex.findall(asset_name) + regex_result = version_regex.findall(folder_name) if regex_result: - _asset_name, _version_number = regex_result[0] - matching_asset_doc = get_asset_by_name_case_not_sensitive( - project_name, _asset_name, - all_selected_asset_ids=all_selected_asset_ids) - if matching_asset_doc: + _folder_name, _version_number = regex_result[0] + matching_folder_entity = get_folder_by_name_case_not_sensitive( + project_name, + _folder_name, + all_selected_folder_ids=all_selected_folder_ids + ) + if matching_folder_entity: version_number = int(_version_number) - return matching_asset_doc, version_number + return matching_folder_entity, version_number -def parse_containing(project_name, asset_name, all_selected_asset_ids=None): - """Look if file name contains any existing asset name""" - for asset_doc in get_assets(project_name, asset_ids=all_selected_asset_ids, - fields=["name"]): - if asset_doc["name"].lower() in asset_name.lower(): - return get_asset_by_name(project_name, asset_doc["name"]) +def parse_containing(project_name, folder_name, all_selected_folder_ids=None): + """Look if file name contains any existing folder name""" + for folder_entity in ayon_api.get_folders( + project_name, + folder_ids=all_selected_folder_ids, + fields={"id", "name"} + ): + if folder_entity["name"].lower() in folder_name.lower(): + return ayon_api.get_folder_by_id( + project_name, + folder_entity["id"] + ) -def get_asset_by_name_case_not_sensitive(project_name, asset_name, - all_selected_asset_ids=None, - log=None): +def get_folder_by_name_case_not_sensitive( + project_name, + folder_name, + all_selected_folder_ids=None, + log=None +): """Handle more cases in file names""" if not log: log = Logger.get_logger(__name__) - asset_name = re.compile(asset_name, re.IGNORECASE) + folder_name = re.compile(folder_name, re.IGNORECASE) - assets = list(get_assets(project_name, asset_ids=all_selected_asset_ids, - asset_names=[asset_name])) - if assets: - if len(assets) > 1: - log.warning("Too many records found for {}".format( - asset_name)) - return + folder_entities = list(ayon_api.get_folders( + project_name, + folder_ids=all_selected_folder_ids, + folder_names=[folder_name] + )) - return assets.pop() + if len(folder_entities) > 1: + log.warning("Too many records found for {}".format( + folder_name)) + return None + + if folder_entities: + return folder_entities.pop() diff --git a/client/ayon_core/hosts/traypublisher/csv_publish.py b/client/ayon_core/hosts/traypublisher/csv_publish.py new file mode 100644 index 0000000000..b43792a357 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/csv_publish.py @@ -0,0 +1,86 @@ +import os + +import pyblish.api +import pyblish.util + +from ayon_api import get_folder_by_path, get_task_by_name +from ayon_core.lib.attribute_definitions import FileDefItem +from ayon_core.pipeline import install_host +from ayon_core.pipeline.create import CreateContext + +from ayon_core.hosts.traypublisher.api import TrayPublisherHost + + +def csvpublish( + filepath, + project_name, + folder_path, + task_name=None, + ignore_validators=False +): + """Publish CSV file. 
+ + Args: + filepath (str): Path to CSV file. + project_name (str): Project name. + folder_path (str): Folder path. + task_name (Optional[str]): Task name. + ignore_validators (Optional[bool]): Option to ignore validators. + """ + + # initialization of host + host = TrayPublisherHost() + install_host(host) + + # setting host context into project + host.set_project_name(project_name) + + # form precreate data with field values + file_field = FileDefItem.from_paths([filepath], False).pop().to_dict() + precreate_data = { + "csv_filepath_data": file_field, + } + + # create context initialization + create_context = CreateContext(host, headless=True) + folder_entity = get_folder_by_path( + project_name, + folder_path=folder_path, + ) + + if not folder_entity: + raise ValueError( + f"Folder path '{folder_path}' doesn't " + f"exist in project '{project_name}'." + ) + + task_entity = get_task_by_name( + project_name, + folder_entity["id"], + task_name, + ) + + if not task_entity: + raise ValueError( + f"Task name '{task_name}' doesn't " + f"exist in folder '{folder_path}'." + ) + + create_context.create( + "io.ayon.creators.traypublisher.csv_ingest", + "Main", + folder_entity=folder_entity, + task_entity=task_entity, + pre_create_data=precreate_data, + ) + + # publishing context initialization + pyblish_context = pyblish.api.Context() + pyblish_context.data["create_context"] = create_context + + # redefine targets (skip 'local' to disable validators) + targets = None + if ignore_validators: + targets = ["default", "ingest"] + + # publishing + pyblish.util.publish(context=pyblish_context, targets=targets) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py index 5c913b3289..4d865c1c5c 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_colorspace_look.py @@ -5,8 +5,8 @@ This creator is used to publish colorspace look files thanks to production type `ociolook`. All files are published as representation. """ from pathlib import Path +import ayon_api -from ayon_core.client import get_asset_by_name from ayon_core.lib.attribute_definitions import ( FileDef, EnumDef, TextDef, UISeparatorDef ) @@ -54,14 +54,21 @@ This creator publishes color space look file (LUT).
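Returning to the `csvpublish` helper above: besides the `ingestcsv` CLI wrapper, it can be called directly from Python, e.g. from a studio script. A minimal sketch with placeholder values:

```python
from ayon_core.hosts.traypublisher.csv_publish import csvpublish

# Placeholder values; the project, folder and task must exist on the server.
csvpublish(
    "/path/to/ingest.csv",
    project_name="MyProject",
    folder_path="/shots/sq01/sh010",
    task_name="edit",
    ignore_validators=False,
)
```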
# this should never happen raise CreatorError("Missing files from representation") - asset_name = instance_data["folderPath"] - asset_doc = get_asset_by_name( - self.project_name, asset_name) + folder_path = instance_data["folderPath"] + task_name = instance_data["task"] + folder_entity = ayon_api.get_folder_by_path( + self.project_name, folder_path) + + task_entity = None + if task_name: + task_entity = ayon_api.get_task_by_name( + self.project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name=self.project_name, - asset_doc=asset_doc, - task_name=instance_data["task"] or "Not set", + folder_entity=folder_entity, + task_entity=task_entity, variant=instance_data["variant"], ) diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py new file mode 100644 index 0000000000..8143e8b45b --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_csv_ingest.py @@ -0,0 +1,741 @@ +import os +import re +import csv +import clique +from io import StringIO +from copy import deepcopy, copy + +from ayon_api import get_folder_by_path, get_task_by_name +from ayon_core.pipeline.create import get_product_name +from ayon_core.pipeline import CreatedInstance +from ayon_core.lib import FileDef, BoolDef +from ayon_core.lib.transcoding import ( + VIDEO_EXTENSIONS, IMAGE_EXTENSIONS +) +from ayon_core.pipeline.create import CreatorError +from ayon_core.hosts.traypublisher.api.plugin import ( + TrayPublishCreator +) + + +class IngestCSV(TrayPublishCreator): + """CSV ingest creator class""" + + icon = "fa.file" + + label = "CSV Ingest" + product_type = "csv_ingest_file" + identifier = "io.ayon.creators.traypublisher.csv_ingest" + + default_variants = ["Main"] + + description = "Ingest products' data from CSV file" + detailed_description = """ +Ingest products' data from CSV file following column and representation +configuration in project settings. +""" + + # Position in the list of creators. + order = 10 + + # settings for this creator + columns_config = {} + representations_config = {} + + def create(self, subset_name, instance_data, pre_create_data): + """Create a product from each row found in the CSV. + + Args: + subset_name (str): The subset name. + instance_data (dict): The instance data. + pre_create_data (dict): The pre-create data. + """ + + csv_filepath_data = pre_create_data.get("csv_filepath_data", {}) + + folder = csv_filepath_data.get("directory", "") + if not os.path.exists(folder): + raise CreatorError( + f"Directory '{folder}' does not exist." + ) + filename = csv_filepath_data.get("filenames", []) + self._process_csv_file(subset_name, instance_data, folder, filename[0]) + + def _process_csv_file( + self, subset_name, instance_data, staging_dir, filename): + """Process CSV file. + + Args: + subset_name (str): The subset name. + instance_data (dict): The instance data. + staging_dir (str): The staging directory. + filename (str): The filename.
+ """ + + # create new instance from the csv file via self function + self._pass_data_to_csv_instance( + instance_data, + staging_dir, + filename + ) + + csv_instance = CreatedInstance( + self.product_type, subset_name, instance_data, self + ) + self._store_new_instance(csv_instance) + + csv_instance["csvFileData"] = { + "filename": filename, + "staging_dir": staging_dir, + } + + # from special function get all data from csv file and convert them + # to new instances + csv_data_for_instances = self._get_data_from_csv( + staging_dir, filename) + + # create instances from csv data via self function + self._create_instances_from_csv_data( + csv_data_for_instances, staging_dir + ) + + def _create_instances_from_csv_data( + self, + csv_data_for_instances, + staging_dir + ): + """Create instances from csv data""" + + for folder_path, prepared_data in csv_data_for_instances.items(): + project_name = self.create_context.get_current_project_name() + products = prepared_data["products"] + + for instance_name, product_data in products.items(): + # get important instance variables + task_name = product_data["task_name"] + task_type = product_data["task_type"] + variant = product_data["variant"] + product_type = product_data["product_type"] + version = product_data["version"] + + # create subset/product name + product_name = get_product_name( + project_name, + task_name, + task_type, + self.host_name, + product_type, + variant + ) + + # make sure frame start/end is inherited from csv columns + # expected frame range data are handles excluded + for _, repre_data in product_data["representations"].items(): # noqa: E501 + frame_start = repre_data["frameStart"] + frame_end = repre_data["frameEnd"] + handle_start = repre_data["handleStart"] + handle_end = repre_data["handleEnd"] + fps = repre_data["fps"] + break + + # try to find any version comment in representation data + version_comment = next( + iter( + repre_data["comment"] + for repre_data in product_data["representations"].values() # noqa: E501 + if repre_data["comment"] + ), + None + ) + + # try to find any slate switch in representation data + slate_exists = any( + repre_data["slate"] + for _, repre_data in product_data["representations"].items() # noqa: E501 + ) + + # get representations from product data + representations = product_data["representations"] + label = f"{folder_path}_{product_name}_v{version:>03}" + + families = ["csv_ingest"] + if slate_exists: + # adding slate to families mainly for loaders to be able + # to filter out slates + families.append("slate") + + # make product data + product_data = { + "name": instance_name, + "folderPath": folder_path, + "families": families, + "label": label, + "task": task_name, + "variant": variant, + "source": "csv", + "frameStart": frame_start, + "frameEnd": frame_end, + "handleStart": handle_start, + "handleEnd": handle_end, + "fps": fps, + "version": version, + "comment": version_comment, + } + + # create new instance + new_instance = CreatedInstance( + product_type, product_name, product_data, self + ) + self._store_new_instance(new_instance) + + if not new_instance.get("prepared_data_for_repres"): + new_instance["prepared_data_for_repres"] = [] + + base_thumbnail_repre_data = { + "name": "thumbnail", + "ext": None, + "files": None, + "stagingDir": None, + "stagingDir_persistent": True, + "tags": ["thumbnail", "delete"], + } + # need to populate all thumbnails for all representations + # so we can check if unique thumbnail per representation + # is needed + thumbnails = [ + 
+                    repre_data["thumbnailPath"]
+                    for repre_data in representations.values()
+                    if repre_data["thumbnailPath"]
+                ]
+                multiple_thumbnails = len(set(thumbnails)) > 1
+                explicit_output_name = None
+                thumbnails_processed = False
+                for filepath, repre_data in representations.items():
+                    # check if any review derivative tag is present
+                    reviewable = any(
+                        tag for tag in repre_data.get("tags", [])
+                        # tag can be `ftrackreview` or `review`
+                        if "review" in tag
+                    )
+                    # multiple unique thumbnails need to be published as
+                    # representations with outputName so they can be paired
+                    # with reviewable video representations (mainly used by
+                    # the Ftrack instance integrator)
+                    if (
+                        thumbnails
+                        and multiple_thumbnails
+                        and reviewable
+                    ):
+                        explicit_output_name = repre_data["representationName"]
+                        relative_thumbnail_path = repre_data["thumbnailPath"]
+                        # representation might not have thumbnail path
+                        # so ignore this one
+                        if not relative_thumbnail_path:
+                            continue
+                        thumb_dir, thumb_file = \
+                            self._get_refactor_thumbnail_path(
+                                staging_dir, relative_thumbnail_path)
+                        filename, ext = os.path.splitext(thumb_file)
+                        thumbnail_repr_data = deepcopy(
+                            base_thumbnail_repre_data)
+                        thumbnail_repr_data.update({
+                            "name": "thumbnail_{}".format(filename),
+                            "ext": ext[1:],
+                            "files": thumb_file,
+                            "stagingDir": thumb_dir,
+                            "outputName": explicit_output_name,
+                        })
+                        new_instance["prepared_data_for_repres"].append({
+                            "type": "thumbnail",
+                            "colorspace": None,
+                            "representation": thumbnail_repr_data,
+                        })
+                        # also add thumbnailPath for ayon to integrate
+                        if not new_instance.get("thumbnailPath"):
+                            new_instance["thumbnailPath"] = (
+                                os.path.join(thumb_dir, thumb_file)
+                            )
+                    elif (
+                        (
+                            thumbnails
+                            and not multiple_thumbnails
+                            and not thumbnails_processed
+                        )
+                        or not reviewable
+                    ):
+                        # Case with a single shared thumbnail or with
+                        # non-reviewable media; this is processed only
+                        # once per instance.
+                        if not thumbnails:
+                            continue
+                        # here we will use only one thumbnail for
+                        # all representations
+                        relative_thumbnail_path = repre_data["thumbnailPath"]
+                        # pop the last thumbnail from the list since there is
+                        # only one and we do not need to iterate over it again
+                        if not relative_thumbnail_path:
+                            relative_thumbnail_path = thumbnails.pop()
+                        thumb_dir, thumb_file = \
+                            self._get_refactor_thumbnail_path(
+                                staging_dir, relative_thumbnail_path)
+                        _, ext = os.path.splitext(thumb_file)
+                        thumbnail_repr_data = deepcopy(
+                            base_thumbnail_repre_data)
+                        thumbnail_repr_data.update({
+                            "ext": ext[1:],
+                            "files": thumb_file,
+                            "stagingDir": thumb_dir
+                        })
+                        new_instance["prepared_data_for_repres"].append({
+                            "type": "thumbnail",
+                            "colorspace": None,
+                            "representation": thumbnail_repr_data,
+                        })
+                        # also add thumbnailPath for ayon to integrate
+                        if not new_instance.get("thumbnailPath"):
+                            new_instance["thumbnailPath"] = (
+                                os.path.join(thumb_dir, thumb_file)
+                            )
+
+                        thumbnails_processed = True
+
+                    # get representation data
+                    representation_data = self._get_representation_data(
+                        filepath, repre_data, staging_dir,
+                        explicit_output_name
+                    )
+
+                    new_instance["prepared_data_for_repres"].append({
+                        "type": "media",
+                        "colorspace": repre_data["colorspace"],
+                        "representation": representation_data,
+                    })
+
+    def _get_refactor_thumbnail_path(
+            self, staging_dir, relative_thumbnail_path):
+        thumbnail_abs_path = os.path.join(
+            staging_dir, relative_thumbnail_path)
+        return os.path.split(
+            thumbnail_abs_path)
+
+    def _get_representation_data(
+        self, filepath, repre_data, staging_dir, explicit_output_name=None
+    ):
+        """Get representation data.
+
+        Args:
+            filepath (str): Filepath to representation file.
+            repre_data (dict): Representation data from CSV file.
+            staging_dir (str): Staging directory.
+            explicit_output_name (Optional[str]): Explicit output name.
+                For grouping purposes with reviewable components.
+                Defaults to None.
+
+        Returns:
+            dict: Representation data.
+        """
+
+        # get extension of file
+        basename = os.path.basename(filepath)
+        extension = os.path.splitext(filepath)[-1].lower()
+
+        # validate that filepath has correct extension for the output
+        repre_name = repre_data["representationName"]
+        repre_config_data = None
+        for repre in self.representations_config["representations"]:
+            if repre["name"] == repre_name:
+                repre_config_data = repre
+                break
+
+        if not repre_config_data:
+            raise CreatorError(
+                f"Representation '{repre_name}' not found "
+                "in config representation data."
+            )
+
+        validate_extensions = repre_config_data["extensions"]
+        if extension not in validate_extensions:
+            raise CreatorError(
+                f"File extension '{extension}' is not valid for "
+                f"representation '{repre_name}'. Expected one of: "
+                f"{validate_extensions}."
+            )
+
+        is_sequence = (extension in IMAGE_EXTENSIONS)
+        # convert ### string in file name to printf-style padding
+        # for correct frame range validation
+        # example: file.###.exr -> file.%03d.exr
+        if "#" in basename:
+            padding = len(basename.split("#")) - 1
+            basename = basename.replace("#" * padding, f"%0{padding}d")
+            is_sequence = True
+
+        # make absolute path to file
+        absfilepath = os.path.normpath(os.path.join(staging_dir, filepath))
+        dirname = os.path.dirname(absfilepath)
+
+        # check if dirname exists
+        if not os.path.isdir(dirname):
+            raise CreatorError(
+                f"Directory '{dirname}' does not exist."
+            )
+
+        # collect all files from dirname
+        paths_for_collection = []
+        for file in os.listdir(dirname):
+            filepath = os.path.join(dirname, file)
+            paths_for_collection.append(filepath)
+
+        collections, _ = clique.assemble(paths_for_collection)
+
+        if collections:
+            collections = collections[0]
+        elif is_sequence:
+            raise CreatorError(
+                f"No collections found in directory '{dirname}'."
+            )
+
+        frame_start = None
+        frame_end = None
+        if is_sequence:
+            files = [os.path.basename(file) for file in collections]
+            frame_start = list(collections.indexes)[0]
+            frame_end = list(collections.indexes)[-1]
+        else:
+            files = basename
+
+        tags = deepcopy(repre_data["tags"])
+        # mark the representation so following plugins know
+        # it contains a slate frame
+        if repre_data["slate"]:
+            tags.append("has_slate")
+
+        # get representation data
+        representation_data = {
+            "name": repre_name,
+            "ext": extension[1:],
+            "files": files,
+            "stagingDir": dirname,
+            "stagingDir_persistent": True,
+            "tags": tags,
+        }
+        if extension in VIDEO_EXTENSIONS:
+            representation_data.update({
+                "fps": repre_data["fps"],
+                "outputName": repre_name,
+            })
+
+        if explicit_output_name:
+            representation_data["outputName"] = explicit_output_name
+
+        if frame_start is not None:
+            representation_data["frameStart"] = frame_start
+        if frame_end is not None:
+            representation_data["frameEnd"] = frame_end
+
+        return representation_data
+
+    def _get_data_from_csv(
+        self, package_dir, filename
+    ):
+        """Collect and validate data from the csv file."""
+        # get current project name from create context
+        project_name = self.create_context.get_current_project_name()
+
+        csv_file_path = os.path.join(
+            package_dir, filename
+        )
+
+        # make sure csv file contains columns from following list
+        required_columns = [
+            column["name"] for column in self.columns_config["columns"]
+            if column["required_column"]
+        ]
+
+        # read csv file
+        with open(csv_file_path, "r") as csv_file:
+            csv_content = csv_file.read()
+
+        # read csv file with DictReader
+        csv_reader = csv.DictReader(
+            StringIO(csv_content),
+            delimiter=self.columns_config["csv_delimiter"]
+        )
+
+        # normalize fieldnames since column names may contain
+        # stray whitespace
+        all_columns = [
+            " ".join(column.split()) for column in csv_reader.fieldnames]
+
+        # set the normalized fieldnames back
+        csv_reader.fieldnames = all_columns
+
+        # check if csv file contains all required columns
+        missing_columns = [
+            column for column in required_columns
+            if column not in all_columns
+        ]
+        if missing_columns:
+            raise CreatorError(
+                f"Missing required columns: {missing_columns}"
+            )
+
+        csv_data = {}
+        # get data from csv file
+        for row in csv_reader:
+            # Get required columns first
+            # TODO: will need to be folder path in CSV
+            # TODO: `context_asset_name` is now `folder_path`
+            folder_path = self._get_row_value_with_validation(
+                "Folder Path", row)
+            task_name = self._get_row_value_with_validation(
+                "Task Name", row)
+            version = self._get_row_value_with_validation(
+                "Version", row)
+
+            # Get optional columns
+            variant = self._get_row_value_with_validation(
+                "Variant", row)
+            product_type = self._get_row_value_with_validation(
+                "Product Type", row)
+
+            pre_product_name = (
+                f"{task_name}{variant}{product_type}"
+                f"{version}".replace(" ", "").lower()
+            )
+
+            # get representation data
+            repre_filename, representation_data = \
+                self._get_representation_row_data(row)
+
+            # TODO: batch query of all folder paths and task names
+
+            # get folder entity from folder path
+            folder_entity = get_folder_by_path(
+                project_name, folder_path)
+
+            # make sure folder exists
+            if not 
folder_entity:
+                raise CreatorError(
+                    f"Folder '{folder_path}' not found."
+                )
+
+            # find the task entity under the folder entity by name
+            task_entity = get_task_by_name(
+                project_name, folder_entity["id"], task_name)
+
+            # check if task name is a valid task on the folder
+            if not task_entity:
+                raise CreatorError(
+                    f"Task '{task_name}' not found on folder "
+                    f"'{folder_path}'."
+                )
+
+            # gather all csv rows into one dict and make sure there are
+            # no duplicates; rows are validated and sorted under the
+            # correct existing folder and representations are distributed
+            # under products following variants
+            if folder_path not in csv_data:
+                csv_data[folder_path] = {
+                    "folder_entity": folder_entity,
+                    "products": {
+                        pre_product_name: {
+                            "task_name": task_name,
+                            "task_type": task_entity["taskType"],
+                            "variant": variant,
+                            "product_type": product_type,
+                            "version": version,
+                            "representations": {
+                                repre_filename: representation_data,
+                            },
+                        }
+                    }
+                }
+            else:
+                csv_products = csv_data[folder_path]["products"]
+                if pre_product_name not in csv_products:
+                    csv_products[pre_product_name] = {
+                        "task_name": task_name,
+                        "task_type": task_entity["taskType"],
+                        "variant": variant,
+                        "product_type": product_type,
+                        "version": version,
+                        "representations": {
+                            repre_filename: representation_data,
+                        },
+                    }
+                else:
+                    csv_representations = \
+                        csv_products[pre_product_name]["representations"]
+                    if repre_filename in csv_representations:
+                        raise CreatorError(
+                            f"Duplicate filename '{repre_filename}'"
+                            " in csv file."
+                        )
+                    csv_representations[repre_filename] = representation_data
+
+        return csv_data
+
+    def _get_representation_row_data(self, row_data):
+        """Get representation data from a csv row."""
+        # Get required columns first
+        file_path = self._get_row_value_with_validation(
+            "File Path", row_data)
+        frame_start = self._get_row_value_with_validation(
+            "Frame Start", row_data)
+        frame_end = self._get_row_value_with_validation(
+            "Frame End", row_data)
+        handle_start = self._get_row_value_with_validation(
+            "Handle Start", row_data)
+        handle_end = self._get_row_value_with_validation(
+            "Handle End", row_data)
+        fps = self._get_row_value_with_validation(
+            "FPS", row_data)
+
+        # Get optional columns
+        thumbnail_path = self._get_row_value_with_validation(
+            "Version Thumbnail", row_data)
+        colorspace = self._get_row_value_with_validation(
+            "Representation Colorspace", row_data)
+        comment = self._get_row_value_with_validation(
+            "Version Comment", row_data)
+        repre = self._get_row_value_with_validation(
+            "Representation", row_data)
+        slate_exists = self._get_row_value_with_validation(
+            "Slate Exists", row_data)
+        repre_tags = self._get_row_value_with_validation(
+            "Representation Tags", row_data)
+
+        # convert tags value to list
+        tags_list = copy(self.representations_config["default_tags"])
+        if repre_tags:
+            tags_list = []
+            tags_delimiter = self.representations_config["tags_delimiter"]
+            if tags_delimiter in repre_tags:
+                tags = repre_tags.split(tags_delimiter)
+                for _tag in tags:
+                    # strip spaces and lower the tag
+                    tags_list.append(_tag.strip().lower())
+            else:
+                tags_list.append(repre_tags.strip().lower())
+
+        representation_data = {
+            "colorspace": colorspace,
+            "comment": comment,
+            "representationName": repre,
+            "slate": slate_exists,
+            "tags": tags_list,
+            "thumbnailPath": thumbnail_path,
+            "frameStart": int(frame_start),
+            "frameEnd": int(frame_end),
+            "handleStart": int(handle_start),
+            "handleEnd": int(handle_end),
+            "fps": float(fps),
+        }
+        return file_path, representation_data
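+
+    # Illustrative example (hypothetical values): a csv row containing
+    #   "File Path": "sh010/plate_v001.####.exr", "Frame Start": "1001",
+    #   "Frame End": "1100", "FPS": "24", "Representation": "exr",
+    #   "Slate Exists": "True"
+    # would be returned by '_get_representation_row_data' roughly as:
+    #   ("sh010/plate_v001.####.exr",
+    #    {"representationName": "exr", "slate": True, "frameStart": 1001,
+    #     "frameEnd": 1100, "fps": 24.0, ...})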
+
+    def _get_row_value_with_validation(
+        self, column_name, row_data, default_value=None
+    ):
+        """Get row value with validation."""
+
+        # get column data from column config
+        column_data = None
+        for column in self.columns_config["columns"]:
+            if column["name"] == column_name:
+                column_data = column
+                break
+
+        if not column_data:
+            raise CreatorError(
+                f"Column '{column_name}' not found in column config."
+            )
+
+        # get column value from row
+        column_value = row_data.get(column_name)
+        column_required = column_data["required_column"]
+
+        # check if column value is not empty string and column is required
+        if column_value == "" and column_required:
+            raise CreatorError(
+                f"Value in column '{column_name}' is required."
+            )
+
+        # get column type
+        column_type = column_data["type"]
+        # get column validation regex
+        column_validation = column_data["validation_pattern"]
+        # get column default value
+        column_default = default_value or column_data["default"]
+
+        # treat zero as "no default" for numeric columns
+        if column_type in ["number", "decimal"] and column_default == 0:
+            column_default = None
+
+        # check if column value is not empty string
+        if column_value == "":
+            # set default value if column value is empty string
+            column_value = column_default
+
+        # set column value to correct type following column type
+        if column_type == "number" and column_value is not None:
+            column_value = int(column_value)
+        elif column_type == "decimal" and column_value is not None:
+            column_value = float(column_value)
+        elif column_type == "bool":
+            column_value = column_value in ["true", "True"]
+
+        # check if column value matches validation regex
+        if (
+            column_value is not None and
+            not re.match(str(column_validation), str(column_value))
+        ):
+            raise CreatorError(
+                f"Column '{column_name}' value '{column_value}' "
+                f"does not match validation regex '{column_validation}' \n"
+                f"Row data: {row_data} \n"
+                f"Column data: {column_data}"
+            )
+
+        return column_value
+
+    def _pass_data_to_csv_instance(
+        self, instance_data, staging_dir, filename
+    ):
+        """Add the csv file representation to instance data."""
+
+        representation = {
+            "name": "csv",
+            "ext": "csv",
+            "files": filename,
+            "stagingDir": staging_dir,
+            "stagingDir_persistent": True,
+        }
+
+        instance_data.update({
+            "label": f"CSV: {filename}",
+            "representations": [representation],
+            "stagingDir": staging_dir,
+            "stagingDir_persistent": True,
+        })
+
+    def get_instance_attr_defs(self):
+        return [
+            BoolDef(
+                "add_review_family",
+                default=True,
+                label="Review"
+            )
+        ]
+
+    def get_pre_create_attr_defs(self):
+        """Create pre-create attributes for the creator plugin.
+ + Returns: + list: list of attribute object instances + """ + # Use same attributes as for instance attributes + attr_defs = [ + FileDef( + "csv_filepath_data", + folders=False, + extensions=[".csv"], + allow_sequences=False, + single_item=True, + label="CSV File", + ), + ] + return attr_defs diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py index a7abd3e6db..4057aee9a6 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py +++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_editorial.py @@ -1,11 +1,9 @@ import os from copy import deepcopy + +import ayon_api import opentimelineio as otio -from ayon_core.client import ( - get_asset_by_name, - get_project -) from ayon_core.hosts.traypublisher.api.plugin import ( TrayPublishCreator, HiddenTrayPublishCreator @@ -196,12 +194,14 @@ or updating already created. Publishing will create OTIO file. if k not in product_types } - asset_name = instance_data["folderPath"] - asset_doc = get_asset_by_name(self.project_name, asset_name) + folder_path = instance_data["folderPath"] + folder_entity = ayon_api.get_folder_by_path( + self.project_name, folder_path + ) if pre_create_data["fps"] == "from_selection": - # get asset doc data attributes - fps = asset_doc["data"]["fps"] + # get 'fps' from folder attributes + fps = folder_entity["attrib"]["fps"] else: fps = float(pre_create_data["fps"]) @@ -226,18 +226,18 @@ or updating already created. Publishing will create OTIO file. # Create all clip instances clip_instance_properties.update({ "fps": fps, - "parent_asset_name": asset_name, "variant": instance_data["variant"] }) # create clip instances self._get_clip_instances( + folder_entity, otio_timeline, media_path, clip_instance_properties, allowed_product_type_presets, os.path.basename(seq_path), - first_otio_timeline + first_otio_timeline, ) if not first_otio_timeline: @@ -248,7 +248,8 @@ or updating already created. Publishing will create OTIO file. self._create_otio_instance( product_name, instance_data, - seq_path, media_path, + seq_path, + media_path, first_otio_timeline ) @@ -332,6 +333,7 @@ or updating already created. Publishing will create OTIO file. def _get_clip_instances( self, + folder_entity, otio_timeline, media_path, instance_data, @@ -342,6 +344,7 @@ or updating already created. Publishing will create OTIO file. """Helping function for creating clip instance Args: + folder_entity (dict[str, Any]): Folder entity. otio_timeline (otio.Timeline): otio timeline object media_path (str): media file path string instance_data (dict): clip instance data @@ -373,7 +376,6 @@ or updating already created. Publishing will create OTIO file. if not self._validate_clip_for_processing(otio_clip): continue - # get available frames info to clip data self._create_otio_reference(otio_clip, media_path, media_data) @@ -383,7 +385,8 @@ or updating already created. Publishing will create OTIO file. base_instance_data = self._get_base_instance_data( otio_clip, instance_data, - track_start_frame + track_start_frame, + folder_entity ) parenting_data = { @@ -399,7 +402,7 @@ or updating already created. Publishing will create OTIO file. ): continue - instance = self._make_product_instance( + self._make_product_instance( otio_clip, product_type_preset, deepcopy(base_instance_data), @@ -566,7 +569,7 @@ or updating already created. Publishing will create OTIO file. 
return c_instance def _make_product_naming(self, product_type_preset, instance_data): - """Subset name maker + """Product name maker Args: product_type_preset (dict): single preset item @@ -575,7 +578,7 @@ or updating already created. Publishing will create OTIO file. Returns: str: label string """ - asset_name = instance_data["creator_attributes"]["folderPath"] + folder_path = instance_data["creator_attributes"]["folderPath"] variant_name = instance_data["variant"] product_type = product_type_preset["product_type"] @@ -588,7 +591,7 @@ or updating already created. Publishing will create OTIO file. product_type, _variant_name.capitalize() ) label = "{} {}".format( - asset_name, + folder_path, product_name ) @@ -606,6 +609,7 @@ or updating already created. Publishing will create OTIO file. otio_clip, instance_data, track_start_frame, + folder_entity, ): """Factoring basic set of instance data. @@ -616,9 +620,12 @@ or updating already created. Publishing will create OTIO file. Returns: dict: instance data + """ + parent_folder_path = folder_entity["path"] + parent_folder_name = parent_folder_path.rsplit("/", 1)[-1] + # get clip instance properties - parent_asset_name = instance_data["parent_asset_name"] handle_start = instance_data["handle_start"] handle_end = instance_data["handle_end"] timeline_offset = instance_data["timeline_offset"] @@ -626,9 +633,9 @@ or updating already created. Publishing will create OTIO file. fps = instance_data["fps"] variant_name = instance_data["variant"] - # basic unique asset name + # basic unique folder name clip_name = os.path.splitext(otio_clip.name)[0] - project_doc = get_project(self.project_name) + project_entity = ayon_api.get_project(self.project_name) shot_name, shot_metadata = self._shot_metadata_solver.generate_data( clip_name, @@ -636,14 +643,13 @@ or updating already created. Publishing will create OTIO file. "anatomy_data": { "project": { "name": self.project_name, - "code": project_doc["data"]["code"] + "code": project_entity["code"] }, - "parent": parent_asset_name, + "parent": parent_folder_name, "app": self.host_name }, - "selected_asset_doc": get_asset_by_name( - self.project_name, parent_asset_name), - "project_doc": project_doc + "selected_folder_entity": folder_entity, + "project_entity": project_entity } ) @@ -669,7 +675,7 @@ or updating already created. Publishing will create OTIO file. base_instance_data = { "shotName": shot_name, "variant": variant_name, - "task": "", + "task": None, "newAssetPublishing": True, "trackStartFrame": track_start_frame, "timelineOffset": timeline_offset, @@ -680,7 +686,7 @@ or updating already created. Publishing will create OTIO file. 
# update base instance data with context data
         # and also update creator attributes with context data
         creator_attributes["folderPath"] = shot_metadata.pop("folderPath")
-        base_instance_data["folderPath"] = parent_asset_name
+        base_instance_data["folderPath"] = parent_folder_path
 
         # add creator attributes to shared instance data
         base_instance_data["creator_attributes"] = creator_attributes
diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py
index 9b3dfdd334..546408b4d6 100644
--- a/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py
+++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_movie_batch.py
@@ -1,8 +1,10 @@
 import copy
 import os
 import re
+import collections
+
+import ayon_api
 
-from ayon_core.client import get_asset_name_identifier
 from ayon_core.lib import (
     FileDef,
     BoolDef,
@@ -17,7 +19,7 @@ from ayon_core.pipeline.create import (
 
 from ayon_core.hosts.traypublisher.api.plugin import TrayPublishCreator
 from ayon_core.hosts.traypublisher.batch_parsing import (
-    get_asset_doc_from_file_name
+    get_folder_entity_from_filename
 )
 
 
@@ -53,21 +55,48 @@ class BatchMovieCreator(TrayPublishCreator):
         if not file_paths:
             return
 
+        data_by_folder_id = collections.defaultdict(list)
         for file_info in file_paths:
             instance_data = copy.deepcopy(data)
             file_name = file_info["filenames"][0]
             filepath = os.path.join(file_info["directory"], file_name)
             instance_data["creator_attributes"] = {"filepath": filepath}
 
-            asset_doc, version = get_asset_doc_from_file_name(
-                file_name, self.project_name, self.version_regex)
+            folder_entity, version = get_folder_entity_from_filename(
+                self.project_name, file_name, self.version_regex)
+            data_by_folder_id[folder_entity["id"]].append(
+                (instance_data, folder_entity)
+            )
 
-            product_name, task_name = self._get_product_and_task(
-                asset_doc, data["variant"], self.project_name)
+            # query tasks of all collected folders and map them by
+            # lowered task name under each folder id
+            all_task_entities = ayon_api.get_tasks(
+                self.project_name, folder_ids=set(data_by_folder_id.keys())
+            )
+            task_entity_by_folder_id = collections.defaultdict(dict)
+            for task_entity in all_task_entities:
+                folder_id = task_entity["folderId"]
+                task_name = task_entity["name"].lower()
+                task_entity_by_folder_id[folder_id][task_name] = task_entity
 
-            asset_name = get_asset_name_identifier(asset_doc)
+            # find the first matching default task on the current folder
+            task_entities_by_name = task_entity_by_folder_id[
+                folder_entity["id"]
+            ]
+            task_name = None
+            task_entity = None
+            for default_task_name in self.default_tasks:
+                _name = default_task_name.lower()
+                if _name in task_entities_by_name:
+                    task_entity = task_entities_by_name[_name]
+                    task_name = task_entity["name"]
+                    break
 
-            instance_data["folderPath"] = asset_name
+            product_name = self._get_product_name(
+                self.project_name, task_entity, data["variant"]
+            )
+
+            instance_data["folderPath"] = folder_entity["path"]
             instance_data["task"] = task_name
 
             # Create new instance
@@ -75,15 +104,18 @@ class BatchMovieCreator(TrayPublishCreator):
                 instance_data, self)
             self._store_new_instance(new_instance)
 
-    def _get_product_and_task(self, asset_doc, variant, project_name):
+    def _get_product_name(self, project_name, task_entity, variant):
         """Create product name according to standard template process"""
-        task_name = self._get_task_name(asset_doc)
         host_name = self.create_context.host_name
+        task_name = task_type = None
+        if task_entity:
+            task_name = task_entity["name"]
+            task_type = task_entity["taskType"]
 
         try:
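+            # NOTE: the product name template may require a task; when no
+            #   task was resolved this call can raise and the fallback
+            #   below creates the instance with a placeholder task instead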
             product_name = get_product_name(
                 project_name,
-                asset_doc,
                 task_name,
+                task_type,
                 host_name,
                 self.product_type,
                 variant,
@@ -92,34 +124,18 @@ class BatchMovieCreator(TrayPublishCreator):
             # Create instance with fake task
             # - instance will be marked as invalid so it can't be published
             # but user have ability to change it
-            # NOTE: This expect that there is not task 'Undefined' on asset
-            task_name = "Undefined"
+            # NOTE: This expects that there is no task 'Undefined' on folder
+            dumb_value = "Undefined"
             product_name = get_product_name(
                 project_name,
-                asset_doc,
-                task_name,
+                dumb_value,
+                dumb_value,
                 host_name,
                 self.product_type,
                 variant,
             )
 
-        return product_name, task_name
-
-    def _get_task_name(self, asset_doc):
-        """Get applicable task from 'asset_doc' """
-        available_task_names = {}
-        asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
-        for task_name in asset_tasks.keys():
-            available_task_names[task_name.lower()] = task_name
-
-        task_name = None
-        for _task_name in self.default_tasks:
-            _task_name_low = _task_name.lower()
-            if _task_name_low in available_task_names:
-                task_name = available_task_names[_task_name_low]
-                break
-
-        return task_name
+        return product_name
 
     def get_instance_attr_defs(self):
         return [
@@ -149,8 +165,8 @@ class BatchMovieCreator(TrayPublishCreator):
         ]
 
     def get_detail_description(self):
-        return """# Publish batch of .mov to multiple assets.
+        return """# Publish batch of .mov to multiple folders.
 
-        File names must then contain only asset name, or asset name + version.
-        (eg. 'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov`
+        File names must contain only the folder name, or folder name + version
+        (eg. 'chair.mov', 'chair_v001.mov'; 'my_chair_v001.mov' is not safe).
         """
diff --git a/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py b/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py
index a25da0bf34..f48037701e 100644
--- a/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py
+++ b/client/ayon_core/hosts/traypublisher/plugins/create/create_online.py
@@ -3,11 +3,12 @@ Online file retain their original name and use it as product name.
 
 To avoid conflicts, this creator checks if product with this name already
-exists under selected asset.
+exists under selected folder.
""" from pathlib import Path -# from ayon_core.client import get_subset_by_name, get_asset_by_name +# import ayon_api + from ayon_core.lib.attribute_definitions import FileDef, BoolDef from ayon_core.pipeline import ( CreatedInstance, @@ -52,14 +53,14 @@ class OnlineCreator(TrayPublishCreator): # disable check for existing product with the same name """ - asset = get_asset_by_name( - self.project_name, instance_data["folderPath"], fields=["_id"]) + folder_entity = ayon_api.get_folder_by_path( + self.project_name, instance_data["folderPath"], fields={"id"}) - if get_subset_by_name( - self.project_name, origin_basename, asset["_id"], - fields=["_id"]): + if ayon_api.get_product_by_name( + self.project_name, origin_basename, folder_entity["id"], + fields={"id"}): raise CreatorError(f"product with {origin_basename} already " - "exists in selected asset") + "exists in selected folder") """ instance_data["originalBasename"] = origin_basename @@ -103,8 +104,8 @@ class OnlineCreator(TrayPublishCreator): def get_product_name( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name=None, instance=None diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py new file mode 100644 index 0000000000..33536d0854 --- /dev/null +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_csv_ingest_instance_data.py @@ -0,0 +1,47 @@ +from pprint import pformat +import pyblish.api +from ayon_core.pipeline import publish + + +class CollectCSVIngestInstancesData( + pyblish.api.InstancePlugin, + publish.AYONPyblishPluginMixin, + publish.ColormanagedPyblishPluginMixin +): + """Collect CSV Ingest data from instance. + """ + + label = "Collect CSV Ingest instances data" + order = pyblish.api.CollectorOrder + 0.1 + hosts = ["traypublisher"] + families = ["csv_ingest"] + + def process(self, instance): + + # expecting [(colorspace, repre_data), ...] + prepared_repres_data_items = instance.data[ + "prepared_data_for_repres"] + + for prep_repre_data in prepared_repres_data_items: + type = prep_repre_data["type"] + colorspace = prep_repre_data["colorspace"] + repre_data = prep_repre_data["representation"] + + # thumbnails should be skipped + if type == "media": + # colorspace name is passed from CSV column + self.set_representation_colorspace( + repre_data, instance.context, colorspace + ) + elif type == "media" and colorspace is None: + # TODO: implement colorspace file rules file parsing + self.log.warning( + "Colorspace is not defined in csv for following" + f" representation: {pformat(repre_data)}" + ) + pass + elif type == "thumbnail": + # thumbnails should be skipped + pass + + instance.data["representations"].append(repre_data) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_frame_data_from_asset_entity.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_frame_data_from_asset_entity.py index e8a2cae16c..4d203649c7 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_frame_data_from_asset_entity.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_frame_data_from_asset_entity.py @@ -2,14 +2,14 @@ import pyblish.api class CollectFrameDataFromAssetEntity(pyblish.api.InstancePlugin): - """Collect Frame Data From AssetEntity found in context + """Collect Frame Data From 'folderEntity' found in context. 
Frame range data will only be collected if the keys are not yet collected for the instance. """ order = pyblish.api.CollectorOrder + 0.491 - label = "Collect Missing Frame Data From Asset" + label = "Collect Missing Frame Data From Folder" families = ["plate", "pointcache", "vdbcache", "online", "render"] @@ -27,11 +27,11 @@ class CollectFrameDataFromAssetEntity(pyblish.api.InstancePlugin): if key not in instance.data: missing_keys.append(key) keys_set = [] + folder_attributes = instance.data["folderEntity"]["attrib"] for key in missing_keys: - asset_data = instance.data["assetEntity"]["data"] - if key in asset_data: - instance.data[key] = asset_data[key] + if key in folder_attributes: + instance.data[key] = folder_attributes[key] keys_set.append(key) if keys_set: self.log.debug(f"Frame range data {keys_set} " - "has been collected from asset entity.") + "has been collected from folder entity.") diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_review_frames.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_review_frames.py index 6b41c0dd21..7eceda968a 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_review_frames.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_review_frames.py @@ -20,12 +20,12 @@ class CollectReviewInfo(pyblish.api.InstancePlugin): hosts = ["traypublisher"] def process(self, instance): - asset_entity = instance.data.get("assetEntity") - if instance.data.get("frameStart") is not None or not asset_entity: + folder_entity = instance.data.get("folderEntity") + if instance.data.get("frameStart") is not None or not folder_entity: self.log.debug("Missing required data on instance") return - asset_data = asset_entity["data"] + folder_attributes = folder_entity["attrib"] # Store collected data for logging collected_data = {} for key in ( @@ -35,9 +35,9 @@ class CollectReviewInfo(pyblish.api.InstancePlugin): "handleStart", "handleEnd", ): - if key in instance.data or key not in asset_data: + if key in instance.data or key not in folder_attributes: continue - value = asset_data[key] + value = folder_attributes[key] collected_data[key] = value instance.data[key] = value self.log.debug("Collected data: {}".format(str(collected_data))) diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_sequence_frame_data.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_sequence_frame_data.py index 7eded0f6f5..de18050f41 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_sequence_frame_data.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_sequence_frame_data.py @@ -28,9 +28,9 @@ class CollectSequenceFrameData( return # editorial would fail since they might not be in database yet - new_asset_publishing = instance.data.get("newAssetPublishing") - if new_asset_publishing: - self.log.debug("Instance is creating new asset. Skipping.") + new_folder_publishing = instance.data.get("newAssetPublishing") + if new_folder_publishing: + self.log.debug("Instance is creating new folders. 
Skipping.")
             return
 
         frame_data = self.get_frame_data_from_repre_sequence(instance)
@@ -43,10 +43,9 @@ class CollectSequenceFrameData(
             instance.data[key] = value
             self.log.debug(f"Collected Frame range data '{key}':{value} ")
 
-
     def get_frame_data_from_repre_sequence(self, instance):
         repres = instance.data.get("representations")
-        asset_data = instance.data["assetEntity"]["data"]
+        folder_attributes = instance.data["folderEntity"]["attrib"]
 
         if repres:
             first_repre = repres[0]
@@ -56,6 +55,9 @@ class CollectSequenceFrameData(
             return
 
         files = first_repre["files"]
+        if not isinstance(files, list):
+            files = [files]
+
         collections, _ = clique.assemble(files)
         if not collections:
             # No sequences detected and we can't retrieve
@@ -72,5 +74,5 @@ class CollectSequenceFrameData(
             "frameEnd": repres_frames[-1],
             "handleStart": 0,
             "handleEnd": 0,
-            "fps": asset_data["fps"]
+            "fps": folder_attributes["fps"]
         }
diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py
index d489528c57..5a2f5cbc20 100644
--- a/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py
+++ b/client/ayon_core/hosts/traypublisher/plugins/publish/collect_shot_instances.py
@@ -154,8 +154,9 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
         handle_end = int(instance.data["handleEnd"])
 
         in_info = {
-            "entity_type": "Shot",
-            "custom_attributes": {
+            "entity_type": "folder",
+            "folder_type": "Shot",
+            "attributes": {
                 "handleStart": handle_start,
                 "handleEnd": handle_end,
                 "frameStart": instance.data["frameStart"],
@@ -169,19 +170,18 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
 
         parents = instance.data.get('parents', [])
 
-        # Split by '/' for AYON where asset is a path
-        asset_name = instance.data["folderPath"].split("/")[-1]
-        actual = {asset_name: in_info}
+        folder_name = instance.data["folderPath"].split("/")[-1]
+        actual = {folder_name: in_info}
 
         for parent in reversed(parents):
             parent_name = parent["entity_name"]
-            next_dict = {
-                parent_name: {
-                    "entity_type": parent["entity_type"],
-                    "childs": actual
-                }
+            parent_info = {
+                "entity_type": parent["entity_type"],
+                "children": actual,
             }
-            actual = next_dict
+            if parent_info["entity_type"] == "folder":
+                parent_info["folder_type"] = parent["folder_type"]
+            actual = {parent_name: parent_info}
 
         final_context = self._update_dict(final_context, actual)
diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py
new file mode 100644
index 0000000000..4bdf7c0493
--- /dev/null
+++ b/client/ayon_core/hosts/traypublisher/plugins/publish/extract_csv_file.py
@@ -0,0 +1,29 @@
+import pyblish.api
+
+from ayon_core.pipeline import publish
+
+
+class ExtractCSVFile(publish.Extractor):
+    """Extract the source csv file as a representation."""
+
+    label = "Extract CSV file"
+    order = pyblish.api.ExtractorOrder - 0.45
+    families = ["csv_ingest_file"]
+    hosts = ["traypublisher"]
+
+    def process(self, instance):
+
+        csv_file_data = instance.data["csvFileData"]
+
+        representation_csv = {
+            "name": "csv_data",
+            "ext": "csv",
+            "files": csv_file_data["filename"],
+            "stagingDir": csv_file_data["staging_dir"],
+            "stagingDir_persistent": True
+        }
+
+        instance.data["representations"].append(representation_csv)
+
+        self.log.info("Added CSV file representation: {}".format(
+            representation_csv))
diff --git 
a/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml b/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml index 726ccdffe3..89997b4c8c 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/help/validate_existing_version.xml @@ -5,7 +5,7 @@ ## Version already exists -Version {version} you have set on instance '{product_name}' under '{asset_name}' already exists. This validation is enabled by default to prevent accidental override of existing versions. +Version {version} you have set on instance '{product_name}' under '{folder_path}' already exists. This validation is enabled by default to prevent accidental override of existing versions. ### How to repair? - Click on 'Repair' action -> this will change version to next available. diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py index ddfe8904fa..0b4f8e16c1 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_existing_version.py @@ -16,6 +16,7 @@ class ValidateExistingVersion( order = ValidateContentsOrder hosts = ["traypublisher"] + targets = ["local"] actions = [RepairAction] @@ -40,7 +41,7 @@ class ValidateExistingVersion( formatting_data = { "product_name": product_name, - "asset_name": instance.data["folderPath"], + "folder_path": instance.data["folderPath"], "version": version } raise PublishXmlValidationError( diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py index cd4a98b84d..13f13b05bb 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_frame_ranges.py @@ -16,11 +16,13 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, label = "Validate Frame Range" hosts = ["traypublisher"] families = ["render", "plate"] + targets = ["local"] + order = ValidateContentsOrder optional = True # published data might be sequence (.mov, .mp4) in that counting files - # doesnt make sense + # doesn't make sense check_extensions = ["exr", "dpx", "jpg", "jpeg", "png", "tiff", "tga", "gif", "svg"] skip_timelines_check = [] # skip for specific task names (regex) @@ -31,9 +33,9 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, return # editorial would fail since they might not be in database yet - new_asset_publishing = instance.data.get("newAssetPublishing") - if new_asset_publishing: - self.log.debug("Instance is creating new asset. Skipping.") + new_folder_publishing = instance.data.get("newAssetPublishing") + if new_folder_publishing: + self.log.debug("Instance is creating new folder. 
Skipping.") return if (self.skip_timelines_check and @@ -41,12 +43,11 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, for pattern in self.skip_timelines_check)): self.log.info("Skipping for {} task".format(instance.data["task"])) - asset_doc = instance.data["assetEntity"] - asset_data = asset_doc["data"] - frame_start = asset_data["frameStart"] - frame_end = asset_data["frameEnd"] - handle_start = asset_data["handleStart"] - handle_end = asset_data["handleEnd"] + folder_attributes = instance.data["folderEntity"]["attrib"] + frame_start = folder_attributes["frameStart"] + frame_end = folder_attributes["frameEnd"] + handle_start = folder_attributes["handleStart"] + handle_end = folder_attributes["handleEnd"] duration = (frame_end - frame_start + 1) + handle_start + handle_end repres = instance.data.get("representations") @@ -68,7 +69,7 @@ class ValidateFrameRange(OptionalPyblishPluginMixin, msg = ( "Frame duration from DB:'{}' doesn't match number of files:'{}'" - " Please change frame range for Asset or limit no. of files" + " Please change frame range for Folder or limit no. of files" ). format(int(duration), frames) formatting_data = {"duration": duration, diff --git a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py index 3bd55342af..e9add2369b 100644 --- a/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py +++ b/client/ayon_core/hosts/traypublisher/plugins/publish/validate_online_file.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import ayon_api import pyblish.api from ayon_core.pipeline.publish import ( @@ -6,7 +7,6 @@ from ayon_core.pipeline.publish import ( PublishValidationError, OptionalPyblishPluginMixin, ) -from ayon_core.client import get_subset_by_name class ValidateOnlineFile(OptionalPyblishPluginMixin, @@ -23,12 +23,12 @@ class ValidateOnlineFile(OptionalPyblishPluginMixin, if not self.is_active(instance.data): return project_name = instance.context.data["projectName"] - asset_id = instance.data["assetEntity"]["_id"] - subset_doc = get_subset_by_name( - project_name, instance.data["productName"], asset_id) + folder_id = instance.data["folderEntity"]["id"] + product_entity = ayon_api.get_product_by_name( + project_name, instance.data["productName"], folder_id) - if subset_doc: + if product_entity: raise PublishValidationError( - "Subset to be published already exists.", + "Product to be published already exists.", title=self.label ) diff --git a/client/ayon_core/hosts/tvpaint/api/pipeline.py b/client/ayon_core/hosts/tvpaint/api/pipeline.py index 1b0227e89c..6f5c4d49d4 100644 --- a/client/ayon_core/hosts/tvpaint/api/pipeline.py +++ b/client/ayon_core/hosts/tvpaint/api/pipeline.py @@ -4,10 +4,9 @@ import tempfile import logging import requests - +import ayon_api import pyblish.api -from ayon_core.client import get_asset_by_name from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost from ayon_core.hosts.tvpaint import TVPAINT_ROOT_DIR from ayon_core.settings import get_current_project_settings @@ -93,10 +92,10 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): return self.get_current_context().get("project_name") - def get_current_asset_name(self): + def get_current_folder_path(self): """ Returns: - Union[str, None]: Current asset name. + Union[str, None]: Current folder path. 
""" return self.get_current_context().get("folder_path") @@ -183,13 +182,13 @@ class TVPaintHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): log.info("Setting up project...") global_context = get_global_context() project_name = global_context.get("project_name") - asset_name = global_context.get("aset_name") - if not project_name or not asset_name: + folder_path = global_context.get("folder_path") + if not project_name or not folder_path: return - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) - set_context_settings(project_name, asset_doc) + set_context_settings(project_name, folder_entity) def application_exit(self): """Logic related to TimerManager. @@ -234,7 +233,7 @@ def containerise( "name": name, "namespace": namespace, "loader": str(loader), - "representation": str(context["representation"]["_id"]) + "representation": context["representation"]["id"] } if current_containers is None: current_containers = get_containers() @@ -466,17 +465,24 @@ def get_containers(): return output -def set_context_settings(project_name, asset_doc): - """Set workfile settings by asset document data. +def set_context_settings(project_name, folder_entity): + """Set workfile settings by folder entity attributes. Change fps, resolution and frame start/end. + + Args: + project_name (str): Project name. + folder_entity (dict[str, Any]): Folder entity. + """ - width_key = "resolutionWidth" - height_key = "resolutionHeight" + if not folder_entity: + return - width = asset_doc["data"].get(width_key) - height = asset_doc["data"].get(height_key) + folder_attributes = folder_entity["attrib"] + + width = folder_attributes.get("resolutionWidth") + height = folder_attributes.get("resolutionHeight") if width is None or height is None: print("Resolution was not found!") else: @@ -484,7 +490,7 @@ def set_context_settings(project_name, asset_doc): "tv_resizepage {} {} 0".format(width, height) ) - framerate = asset_doc["data"].get("fps") + framerate = folder_attributes.get("fps") if framerate is not None: execute_george( @@ -493,15 +499,15 @@ def set_context_settings(project_name, asset_doc): else: print("Framerate was not found!") - frame_start = asset_doc["data"].get("frameStart") - frame_end = asset_doc["data"].get("frameEnd") + frame_start = folder_attributes.get("frameStart") + frame_end = folder_attributes.get("frameEnd") if frame_start is None or frame_end is None: print("Frame range was not found!") return - handle_start = asset_doc["data"].get("handleStart") - handle_end = asset_doc["data"].get("handleEnd") + handle_start = folder_attributes.get("handleStart") + handle_end = folder_attributes.get("handleEnd") # Always start from 0 Mark In and set only Mark Out mark_in = 0 diff --git a/client/ayon_core/hosts/tvpaint/api/plugin.py b/client/ayon_core/hosts/tvpaint/api/plugin.py index ef9f82b783..e715b959f4 100644 --- a/client/ayon_core/hosts/tvpaint/api/plugin.py +++ b/client/ayon_core/hosts/tvpaint/api/plugin.py @@ -56,20 +56,29 @@ class TVPaintCreatorCommon: def _custom_get_product_name( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name=None, instance=None ): dynamic_data = self.get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] return 
get_product_name(
             project_name,
-            asset_doc,
             task_name,
+            task_type,
             host_name,
             self.product_type,
             variant,
@@ -107,13 +116,15 @@ class TVPaintCreator(Creator, TVPaintCreatorCommon):
         self._remove_instance_from_context(instance)
 
     def get_dynamic_data(self, *args, **kwargs):
-        # Change asset and name by current workfile context
+        # Change folder and name by current workfile context
        create_context = self.create_context
-        asset_name = create_context.get_current_asset_name()
+        folder_path = create_context.get_current_folder_path()
         task_name = create_context.get_current_task_name()
         output = {}
-        if asset_name:
-            output["asset"] = asset_name
+        if folder_path:
+            folder_name = folder_path.rsplit("/", 1)[-1]
+            output["asset"] = folder_name
+            output["folder"] = {"name": folder_name}
         if task_name:
             output["task"] = task_name
         return output
@@ -152,22 +163,22 @@ class Loader(LoaderPlugin):
         ]
         return container["members"]
 
-    def get_unique_layer_name(self, asset_name, name):
+    def get_unique_layer_name(self, namespace, name):
         """Layer name with counter as suffix.
 
         Find higher 3 digit suffix from all layer names in scene matching
-        regex `{asset_name}_{name}_{suffix}`. Higher 3 digit suffix is used
-        as base for next number if scene does not contain layer matching
-        regex `0` is used ase base.
+        regex `{namespace}_{name}_{suffix}`. Higher 3 digit suffix is used
+        as base for next number; if scene does not contain layer matching
+        regex, `0` is used as base.
 
         Args:
-            asset_name (str): Name of product's parent asset document.
+            namespace (str): Usually folder name.
             name (str): Name of loaded product.
 
         Returns:
-            (str): `{asset_name}_{name}_{higher suffix + 1}`
+            str: `{namespace}_{name}_{higher suffix + 1}`
         """
-        layer_name_base = "{}_{}".format(asset_name, name)
+        layer_name_base = "{}_{}".format(namespace, name)
 
         counter_regex = re.compile(r"_(\d{3})$")
diff --git a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py
index 25e324c5cc..691b81e089 100644
--- a/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py
+++ b/client/ayon_core/hosts/tvpaint/hooks/pre_launch_args.py
@@ -1,5 +1,5 @@
 from ayon_core.lib import get_ayon_launcher_args
-from ayon_core.lib.applications import PreLaunchHook, LaunchTypes
+from ayon_applications import PreLaunchHook, LaunchTypes
 
 
 class TvpaintPrelaunchHook(PreLaunchHook):
diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py b/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py
index 1415adac2b..34fe0ce8f4 100644
--- a/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py
+++ b/client/ayon_core/hosts/tvpaint/plugins/create/convert_legacy.py
@@ -1,14 +1,14 @@
 import collections
 
 from ayon_core.pipeline.create.creator_plugins import (
-    SubsetConvertorPlugin,
+    ProductConvertorPlugin,
    cache_and_get_instances,
 )
 from ayon_core.hosts.tvpaint.api.plugin import SHARED_DATA_KEY
 from ayon_core.hosts.tvpaint.api.lib import get_groups_data
 
 
-class TVPaintLegacyConverted(SubsetConvertorPlugin):
+class TVPaintLegacyConverted(ProductConvertorPlugin):
     """Conversion of legacy instances in scene to new creators.
 
     This convertor handles only instances created by core creators. 
diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py index dc53ccb9ca..dc9c2466e0 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_render.py @@ -37,7 +37,8 @@ Todos: import collections from typing import Any, Optional, Union -from ayon_core.client import get_asset_by_name, get_asset_name_identifier +import ayon_api + from ayon_core.lib import ( prepare_template_data, AbstractAttrDef, @@ -149,10 +150,21 @@ class CreateRenderlayer(TVPaintCreator): self.mark_for_review = plugin_settings["mark_for_review"] def get_dynamic_data( - self, project_name, asset_doc, task_name, variant, host_name, instance + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ): dynamic_data = super().get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) dynamic_data["renderpass"] = self.default_pass_name dynamic_data["renderlayer"] = variant @@ -208,7 +220,7 @@ class CreateRenderlayer(TVPaintCreator): creator_attributes["group_id"] = group_id creator_attributes["mark_for_review"] = mark_for_review - self.log.info(f"Subset name is {product_name}") + self.log.info(f"Product name is {product_name}") new_instance = CreatedInstance( self.product_type, product_name, @@ -425,10 +437,21 @@ class CreateRenderPass(TVPaintCreator): self._add_instance_to_context(instance) def get_dynamic_data( - self, project_name, asset_doc, task_name, variant, host_name, instance + self, + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ): dynamic_data = super().get_dynamic_data( - project_name, asset_doc, task_name, variant, host_name, instance + project_name, + folder_entity, + task_entity, + variant, + host_name, + instance ) dynamic_data["renderpass"] = variant dynamic_data["renderlayer"] = "{renderlayer}" @@ -576,7 +599,7 @@ class CreateRenderPass(TVPaintCreator): if filtered_layers: self.log.info(( "Changing group of " - f"{','.join([l['name'] for l in filtered_layers])}" + f"{','.join([layer['name'] for layer in filtered_layers])}" f" to {group_id}" )) george_lines = [ @@ -737,7 +760,9 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): grg_lines: list[str] = [] for group_id, group_name in new_group_name_by_id.items(): group: dict[str, Any] = groups_by_id[group_id] - grg_line: str = "tv_layercolor \"setcolor\" {} {} {} {} {}".format( + grg_line: str = ( + "tv_layercolor \"setcolor\" {} {} {} {} {} \"{}\"" + ).format( group["clip_id"], group_id, group["red"], @@ -754,8 +779,8 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): def _prepare_render_layer( self, project_name: str, - asset_doc: dict[str, Any], - task_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], group_id: int, groups: list[dict[str, Any]], mark_for_review: bool, @@ -772,6 +797,7 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): if not match_group: return None + task_name = task_entity["name"] variant: str = match_group["name"] creator: CreateRenderlayer = ( self.create_context.creators[CreateRenderlayer.identifier] @@ -779,20 +805,19 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): product_name: str = creator.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name=self.create_context.host_name, ) - asset_name = 
get_asset_name_identifier(asset_doc) if existing_instance is not None: - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_entity["path"] existing_instance["task"] = task_name existing_instance["productName"] = product_name return existing_instance instance_data: dict[str, str] = { - "folderPath": asset_name, + "folderPath": folder_entity["path"], "task": task_name, "productType": creator.product_type, "variant": variant, @@ -806,13 +831,14 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): def _prepare_render_passes( self, project_name: str, - asset_doc: dict[str, Any], - task_name: str, + folder_entity: dict[str, Any], + task_entity: dict[str, Any], render_layer_instance: CreatedInstance, layers: list[dict[str, Any]], mark_for_review: bool, existing_render_passes: list[CreatedInstance] ): + task_name = task_entity["name"] creator: CreateRenderPass = ( self.create_context.creators[CreateRenderPass.identifier] ) @@ -821,8 +847,6 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): for layer_name in render_pass["layer_names"]: render_pass_by_layer_name[layer_name] = render_pass - asset_name = get_asset_name_identifier(asset_doc) - for layer in layers: layer_name = layer["name"] variant = layer_name @@ -833,21 +857,21 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): product_name = creator.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name=self.create_context.host_name, instance=render_pass ) if render_pass is not None: - render_pass["folderPath"] = asset_name + render_pass["folderPath"] = folder_entity["path"] render_pass["task"] = task_name render_pass["productName"] = product_name continue instance_data: dict[str, str] = { - "folderPath": asset_name, + "folderPath": folder_entity["path"], "task": task_name, "productType": creator.product_type, "variant": variant @@ -886,10 +910,13 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): def create(self, product_name, instance_data, pre_create_data): project_name: str = self.create_context.get_current_project_name() - asset_name: str = instance_data["folderPath"] + folder_path: str = instance_data["folderPath"] task_name: str = instance_data["task"] - asset_doc: dict[str, Any] = get_asset_by_name( - project_name, asset_name) + folder_entity: dict[str, Any] = ayon_api.get_folder_by_path( + project_name, folder_path) + task_entity: dict[str, Any] = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) render_layers_by_group_id: dict[int, CreatedInstance] = {} render_passes_by_render_layer_id: dict[int, list[CreatedInstance]] = ( @@ -951,8 +978,8 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): instance: Union[CreatedInstance, None] = ( self._prepare_render_layer( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, group_id, scene_groups, mark_layers_for_review, @@ -972,8 +999,8 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator): self._prepare_render_passes( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, render_layer_instance, layers, mark_passes_for_review, @@ -1047,16 +1074,16 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): def get_dynamic_data( self, project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, instance ): dynamic_data = super().get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, instance @@ -1069,19 +1096,22 @@ class 
TVPaintSceneRenderCreator(TVPaintAutoCreator): create_context = self.create_context host_name = create_context.host_name project_name = create_context.get_current_project_name() - asset_name = create_context.get_current_asset_name() + folder_path = create_context.get_current_folder_path() task_name = create_context.get_current_task_name() - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name, ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant, "creator_attributes": { @@ -1118,24 +1148,29 @@ class TVPaintSceneRenderCreator(TVPaintAutoCreator): create_context = self.create_context host_name = create_context.host_name project_name = create_context.get_current_project_name() - asset_name = create_context.get_current_asset_name() + folder_path = create_context.get_current_folder_path() task_name = create_context.get_current_task_name() existing_name = existing_instance.get("folderPath") if ( - existing_name != asset_name + existing_name != folder_path or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, existing_instance["variant"], host_name, existing_instance ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py index 1837726cab..acb4f0f8d6 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_review.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_asset_by_name +import ayon_api + from ayon_core.pipeline import CreatedInstance from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator @@ -30,25 +31,29 @@ class TVPaintReviewCreator(TVPaintAutoCreator): create_context = self.create_context host_name = create_context.host_name project_name = create_context.get_current_project_name() - asset_name = create_context.get_current_asset_name() + folder_path = create_context.get_current_folder_path() task_name = create_context.get_current_task_name() - if existing_instance is None: - existing_asset_name = None - else: - existing_asset_name = existing_instance["folderPath"] + existing_folder_path = None + if existing_instance is not None: + existing_folder_path = existing_instance["folderPath"] if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name ) data = { - "folderPath": asset_name, + "folderPath": 
folder_path, "task": task_name, "variant": self.default_variant, } @@ -65,18 +70,23 @@ class TVPaintReviewCreator(TVPaintAutoCreator): self._add_instance_to_context(new_instance) elif ( - existing_asset_name != asset_name + existing_folder_path != folder_path or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, existing_instance["variant"], host_name, existing_instance ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py index 14a11750a5..f21f41439e 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/create/create_workfile.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_asset_by_name +import ayon_api + from ayon_core.pipeline import CreatedInstance from ayon_core.hosts.tvpaint.api.plugin import TVPaintAutoCreator @@ -26,25 +27,29 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator): create_context = self.create_context host_name = create_context.host_name project_name = create_context.get_current_project_name() - asset_name = create_context.get_current_asset_name() + folder_path = create_context.get_current_folder_path() task_name = create_context.get_current_task_name() - if existing_instance is None: - existing_asset_name = None - else: - existing_asset_name = existing_instance["folderPath"] + existing_folder_path = None + if existing_instance is not None: + existing_folder_path = existing_instance["folderPath"] if existing_instance is None: - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, self.default_variant, host_name ) data = { - "folderPath": asset_name, + "folderPath": folder_path, "task": task_name, "variant": self.default_variant } @@ -58,18 +63,23 @@ class TVPaintWorkfileCreator(TVPaintAutoCreator): self._add_instance_to_context(new_instance) elif ( - existing_asset_name != asset_name + existing_folder_path != folder_path or existing_instance["task"] != task_name ): - asset_doc = get_asset_by_name(project_name, asset_name) + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path + ) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) product_name = self.get_product_name( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, existing_instance["variant"], host_name, existing_instance ) - existing_instance["folderPath"] = asset_name + existing_instance["folderPath"] = folder_path existing_instance["task"] = task_name existing_instance["productName"] = product_name diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py index 924c0f2835..aad8f92d26 100644 --- 
a/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_image.py @@ -6,8 +6,8 @@ from ayon_core.hosts.tvpaint.api.lib import execute_george_through_file class ImportImage(plugin.Loader): """Load image or image sequence to TVPaint as new layer.""" - families = ["render", "image", "background", "plate", "review"] - representations = ["*"] + product_types = {"render", "image", "background", "plate", "review"} + representations = {"*"} label = "Import Image" order = 1 @@ -68,10 +68,10 @@ class ImportImage(plugin.Loader): load_options_str += (load_option + " ") # Prepare layer name - asset_name = context["asset"]["name"] + folder_name = context["folder"]["name"] version_name = context["version"]["name"] layer_name = "{}_{}_v{:0>3}".format( - asset_name, + folder_name, name, version_name ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py index 856bf69845..a7fcb9f4a4 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_reference_image.py @@ -1,10 +1,7 @@ import collections from ayon_core.lib.attribute_definitions import BoolDef -from ayon_core.pipeline import ( - get_representation_context, - register_host, -) +from ayon_core.pipeline import registered_host from ayon_core.hosts.tvpaint.api import plugin from ayon_core.hosts.tvpaint.api.lib import ( get_layers_data, @@ -20,8 +17,8 @@ from ayon_core.hosts.tvpaint.api.pipeline import ( class LoadImage(plugin.Loader): """Load image or image sequence to TVPaint as new layer.""" - families = ["render", "image", "background", "plate", "review"] - representations = ["*"] + product_types = {"render", "image", "background", "plate", "review"} + representations = {"*"} label = "Load Image" order = 1 @@ -82,9 +79,9 @@ class LoadImage(plugin.Loader): load_options_str += (load_option + " ") # Prepare layer name - asset_name = context["asset"]["name"] - product_name = context["subset"]["name"] - layer_name = self.get_unique_layer_name(asset_name, product_name) + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + layer_name = self.get_unique_layer_name(folder_name, product_name) path = self.filepath_from_context(context) @@ -176,7 +173,7 @@ class LoadImage(plugin.Loader): return representation = container["representation"] members = self.get_members_from_container(container) - host = register_host() + host = registered_host() current_containers = host.get_containers() pop_idx = None for idx, cur_con in enumerate(current_containers): @@ -210,16 +207,15 @@ class LoadImage(plugin.Loader): def switch(self, container, representation): self.update(container, representation) - def update(self, container, representation): + def update(self, container, context): """Replace container with different version. New layers are loaded as first step. Then is tried to change data in new layers with data from old layers. When that is done old layers are removed. 
""" - # Create new containers first - context = get_representation_context(representation) + # Create new containers first # Get layer ids from previous container old_layer_names = self.get_members_from_container(container) diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py index 86f3e6857f..7e8c8022d6 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_sound.py @@ -22,8 +22,8 @@ class ImportSound(plugin.Loader): file contain any audio. """ - families = ["audio", "review", "plate"] - representations = ["*"] + product_types = {"audio", "review", "plate"} + representations = {"*"} label = "Import Sound" order = 1 @@ -54,7 +54,7 @@ class ImportSound(plugin.Loader): def load(self, context, name, namespace, options): # Create temp file for output output_file = tempfile.NamedTemporaryFile( - mode="w", prefix="pype_tvp_", suffix=".txt", delete=False + mode="w", prefix="ayon_tvp_", suffix=".txt", delete=False ) output_file.close() output_filepath = output_file.name.replace("\\", "/") diff --git a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py index 49ef9fc37b..07c2d91533 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py +++ b/client/ayon_core/hosts/tvpaint/plugins/load/load_workfile.py @@ -1,6 +1,5 @@ import os -from ayon_core.lib import StringTemplate from ayon_core.pipeline import ( registered_host, get_current_context, @@ -24,8 +23,8 @@ from ayon_core.pipeline.version_start import get_versioning_start class LoadWorkfile(plugin.Loader): """Load workfile.""" - families = ["workfile"] - representations = ["tvpp"] + product_types = {"workfile"} + representations = {"tvpp"} label = "Load Workfile" @@ -51,30 +50,36 @@ class LoadWorkfile(plugin.Loader): # Save workfile. host_name = "tvpaint" - project_name = work_context.get("project") - asset_name = work_context.get("asset") - task_name = work_context.get("task") + if "project_name" in work_context: + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + else: + project_name = work_context.get("project") + folder_path = work_context.get("asset") + task_name = work_context.get("task") + # Far cases when there is workfile without work_context - if not asset_name: + if not folder_path: context = get_current_context() project_name = context["project_name"] - asset_name = context["folder_path"] + folder_path = context["folder_path"] task_name = context["task_name"] template_key = get_workfile_template_key_from_context( - asset_name, + project_name, + folder_path, task_name, host_name, - project_name=project_name ) anatomy = Anatomy(project_name) data = get_template_data_with_names( - project_name, asset_name, task_name, host_name + project_name, folder_path, task_name, host_name ) data["root"] = anatomy.roots - file_template = anatomy.templates[template_key]["file"] + work_template = anatomy.get_template_item("work", template_key) # Define saving file extension extensions = host.get_workfile_extensions() @@ -85,14 +90,11 @@ class LoadWorkfile(plugin.Loader): # Fall back to the first extension supported for this host. 
extension = extensions[0] - data["ext"] = extension + data["ext"] = extension.lstrip(".") - folder_template = anatomy.templates[template_key]["folder"] - work_root = StringTemplate.format_strict_template( - folder_template, data - ) + work_root = work_template["directory"].format_strict(data) version = get_last_workfile_with_version( - work_root, file_template, data, extensions + work_root, work_template["file"].template, data, extensions )[1] if version is None: @@ -108,8 +110,6 @@ class LoadWorkfile(plugin.Loader): data["version"] = version - filename = StringTemplate.format_strict_template( - file_template, data - ) + filename = work_template["file"].format_strict(data) path = os.path.join(work_root, filename) host.save_workfile(path) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py index e7b7b2cad1..5f134a0cd0 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -15,14 +15,14 @@ class CollectOutputFrameRange(pyblish.api.InstancePlugin): families = ["review", "render"] def process(self, instance): - asset_doc = instance.data.get("assetEntity") - if not asset_doc: + folder_entity = instance.data.get("folderEntity") + if not folder_entity: return context = instance.context - frame_start = asset_doc["data"]["frameStart"] - fps = asset_doc["data"]["fps"] + frame_start = folder_entity["attrib"]["frameStart"] + fps = folder_entity["attrib"]["fps"] frame_end = frame_start + ( context.data["sceneMarkOut"] - context.data["sceneMarkIn"] ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py index 414b09c123..3155773bda 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/collect_workfile_data.py @@ -92,11 +92,11 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): os.environ[env_key] = workfile_context[key] self.log.info("Context changed to: {}".format(workfile_context)) - asset_name = workfile_context["folder_path"] + folder_path = workfile_context["folder_path"] task_name = workfile_context["task_name"] else: - asset_name = current_context["folder_path"] + folder_path = current_context["folder_path"] task_name = current_context["task_name"] # Handle older workfiles or workfiles without metadata self.log.warning(( @@ -104,12 +104,12 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): " Using current Session context." 
)) - # Store context asset name - context.data["folderPath"] = asset_name + # Store context folder path + context.data["folderPath"] = folder_path context.data["task"] = task_name self.log.info( - "Context is set to Asset: \"{}\" and Task: \"{}\"".format( - asset_name, task_name + "Context is set to Folder: \"{}\" and Task: \"{}\"".format( + folder_path, task_name ) ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py index ab30e3dc10..fe5e148b7b 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -25,8 +25,9 @@ from ayon_core.hosts.tvpaint.lib import ( ) -class ExtractSequence(pyblish.api.Extractor): +class ExtractSequence(pyblish.api.InstancePlugin): label = "Extract Sequence" + order = pyblish.api.ExtractorOrder hosts = ["tvpaint"] families = ["review", "render"] diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml index 83753b3410..bba0104c54 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/help/validate_asset_name.xml @@ -1,14 +1,14 @@ -Subset context +Product context ## Invalid product context Context of the given product doesn't match your current scene. ### How to repair? -Yout can fix this with "Repair" button on the right. This will use '{expected_asset}' asset name and overwrite '{found_asset}' asset name in scene metadata. +You can fix this with the "Repair" button on the right. This will use '{expected_folder}' folder path and overwrite '{found_folder}' folder path in scene metadata. After that restart publishing with Reload button. diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py index 927d601e34..764c090720 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_asset_name.py @@ -9,8 +9,8 @@ from ayon_core.hosts.tvpaint.api.pipeline import ( ) -class FixAssetNames(pyblish.api.Action): - """Repair the asset names. +class FixFolderPaths(pyblish.api.Action): + """Repair the folder paths. Change instance metadata in the workfile. """ @@ -20,16 +20,16 @@ class FixFolderPaths(pyblish.api.Action): on = "failed" def process(self, context, plugin): - context_asset_name = context.data["folderPath"] + context_folder_path = context.data["folderPath"] old_instance_items = list_instances() new_instance_items = [] for instance_item in old_instance_items: - instance_asset_name = instance_item.get("folderPath") + instance_folder_path = instance_item.get("folderPath") if ( - instance_asset_name - and instance_asset_name != context_asset_name + instance_folder_path + and instance_folder_path != context_folder_path ): - instance_item["folderPath"] = context_asset_name + instance_item["folderPath"] = context_folder_path new_instance_items.append(instance_item) write_instances(new_instance_items) @@ -38,23 +38,23 @@ class ValidateAssetName( OptionalPyblishPluginMixin, pyblish.api.ContextPlugin ): - """Validate asset name present on instance. + """Validate folder path present on instance. - Asset name on instance should be the same as context's. + Folder path on instance should be the same as context's.
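+ The "Repair" action overwrites each instance's folder path with the context folder path.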
""" - label = "Validate Asset Names" + label = "Validate Folder Paths" order = pyblish.api.ValidatorOrder hosts = ["tvpaint"] - actions = [FixAssetNames] + actions = [FixFolderPaths] def process(self, context): if not self.is_active(context.data): return - context_asset_name = context.data["folderPath"] + context_folder_path = context.data["folderPath"] for instance in context: - asset_name = instance.data.get("folderPath") - if asset_name and asset_name == context_asset_name: + folder_path = instance.data.get("folderPath") + if folder_path and folder_path == context_folder_path: continue instance_label = ( @@ -64,14 +64,14 @@ class ValidateAssetName( raise PublishXmlValidationError( self, ( - "Different asset name on instance then context's." - " Instance \"{}\" has asset name: \"{}\"" - " Context asset name is: \"{}\"" + "Different folder path on instance then context's." + " Instance \"{}\" has folder path: \"{}\"" + " Context folder path is: \"{}\"" ).format( - instance_label, asset_name, context_asset_name + instance_label, folder_path, context_folder_path ), formatting_data={ - "expected_asset": context_asset_name, - "found_asset": asset_name + "expected_folder": context_folder_path, + "found_folder": folder_path } ) diff --git a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_scene_settings.py b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_scene_settings.py index 2268e59d88..5e42b5ab2f 100644 --- a/client/ayon_core/hosts/tvpaint/plugins/publish/validate_scene_settings.py +++ b/client/ayon_core/hosts/tvpaint/plugins/publish/validate_scene_settings.py @@ -22,7 +22,7 @@ class ValidateProjectSettings( if not self.is_active(context.data): return - expected_data = context.data["assetEntity"]["data"] + folder_attributes = context.data["folderEntity"]["attrib"] scene_data = { "fps": context.data.get("sceneFps"), "resolutionWidth": context.data.get("sceneWidth"), @@ -31,7 +31,7 @@ class ValidateProjectSettings( } invalid = {} for k in scene_data.keys(): - expected_value = expected_data[k] + expected_value = folder_attributes[k] if scene_data[k] != expected_value: invalid[k] = { "current": scene_data[k], "expected": expected_value @@ -46,13 +46,13 @@ class ValidateProjectSettings( json.dumps(invalid, sort_keys=True, indent=4) ), formatting_data={ - "expected_fps": expected_data["fps"], + "expected_fps": folder_attributes["fps"], "current_fps": scene_data["fps"], - "expected_width": expected_data["resolutionWidth"], - "expected_height": expected_data["resolutionHeight"], + "expected_width": folder_attributes["resolutionWidth"], + "expected_height": folder_attributes["resolutionHeight"], "current_width": scene_data["resolutionWidth"], "current_height": scene_data["resolutionHeight"], - "expected_pixel_ratio": expected_data["pixelAspect"], + "expected_pixel_ratio": folder_attributes["pixelAspect"], "current_pixel_ratio": scene_data["pixelAspect"] } ) diff --git a/client/ayon_core/hosts/unreal/api/__init__.py b/client/ayon_core/hosts/unreal/api/__init__.py index ac6a91eae9..7e7f839f27 100644 --- a/client/ayon_core/hosts/unreal/api/__init__.py +++ b/client/ayon_core/hosts/unreal/api/__init__.py @@ -28,9 +28,11 @@ from .pipeline import ( ) __all__ = [ + "UnrealActorCreator", + "UnrealAssetCreator", + "Loader", "install", "uninstall", - "Loader", "ls", "publish", "containerise", diff --git a/client/ayon_core/hosts/unreal/api/pipeline.py b/client/ayon_core/hosts/unreal/api/pipeline.py index 922fc8abd8..a60564d5b0 100644 --- a/client/ayon_core/hosts/unreal/api/pipeline.py +++ 
b/client/ayon_core/hosts/unreal/api/pipeline.py @@ -4,12 +4,12 @@ import json import logging from typing import List from contextlib import contextmanager -import semver import time +import semver import pyblish.api +import ayon_api -from ayon_core.client import get_asset_by_name, get_assets from ayon_core.pipeline import ( register_loader_plugin_path, register_creator_plugin_path, @@ -98,7 +98,7 @@ class UnrealHost(HostBase, ILoadHost, IPublishHost): def install(): - """Install Unreal configuration for OpenPype.""" + """Install Unreal configuration for AYON.""" print("-=" * 40) logo = '''. . @@ -262,7 +262,7 @@ def containerise(name, namespace, nodes, context, loader=None, suffix="_CON"): "name": new_name, "namespace": namespace, "loader": str(loader), - "representation": context["representation"]["_id"], + "representation": context["representation"]["id"], } # 3 - imprint data imprint(f"{path}/{container_name}", data) @@ -601,34 +601,36 @@ def generate_sequence(h, h_dir): ) project_name = get_current_project_name() - asset_data = get_asset_by_name( + # TODO Fix this, it does not return the folder path + folder_path = h_dir.split('/')[-1] + folder_entity = ayon_api.get_folder_by_path( project_name, - h_dir.split('/')[-1], - fields=["_id", "data.fps"] + folder_path, + fields={"id", "attrib.fps"} ) start_frames = [] end_frames = [] - elements = list(get_assets( + elements = list(ayon_api.get_folders( project_name, - parent_ids=[asset_data["_id"]], - fields=["_id", "data.clipIn", "data.clipOut"] + parent_ids=[folder_entity["id"]], + fields={"id", "attrib.clipIn", "attrib.clipOut"} )) for e in elements: - start_frames.append(e.get('data').get('clipIn')) - end_frames.append(e.get('data').get('clipOut')) + start_frames.append(e["attrib"].get("clipIn")) + end_frames.append(e["attrib"].get("clipOut")) - elements.extend(get_assets( + elements.extend(ayon_api.get_folders( project_name, - parent_ids=[e["_id"]], - fields=["_id", "data.clipIn", "data.clipOut"] + parent_ids=[e["id"]], + fields={"id", "attrib.clipIn", "attrib.clipOut"} )) min_frame = min(start_frames) max_frame = max(end_frames) - fps = asset_data.get('data').get("fps") + fps = folder_entity["attrib"].get("fps") sequence.set_display_rate( unreal.FrameRate(fps, 1.0)) diff --git a/client/ayon_core/hosts/unreal/api/tools_ui.py b/client/ayon_core/hosts/unreal/api/tools_ui.py index 084da9a0f0..efae5bb702 100644 --- a/client/ayon_core/hosts/unreal/api/tools_ui.py +++ b/client/ayon_core/hosts/unreal/api/tools_ui.py @@ -125,7 +125,7 @@ class WindowCache: @classmethod def _before_show(cls): - """Create QApplication if does not exists yet.""" + """Create QApplication if does not exist yet.""" if not cls._first_show: return diff --git a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py index 0eaa1adb84..e38591f65d 100644 --- a/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/client/ayon_core/hosts/unreal/hooks/pre_workfile_preparation.py @@ -9,7 +9,7 @@ from pathlib import Path from qtpy import QtCore from ayon_core import resources -from ayon_core.lib.applications import ( +from ayon_applications import ( PreLaunchHook, ApplicationLaunchFailed, LaunchTypes, @@ -49,7 +49,7 @@ class UnrealPrelaunchHook(PreLaunchHook): # Prepare data for fill data and for getting workfile template key anatomy = self.data["anatomy"] - project_doc = self.data["project_doc"] + project_entity = self.data["project_entity"] # Use already prepared workdir data workdir_data =
copy.deepcopy(self.data["workdir_data"]) @@ -61,12 +61,14 @@ class UnrealPrelaunchHook(PreLaunchHook): # Get workfile template key for current context workfile_template_key = get_workfile_template_key( + project_entity["name"], task_type, self.host_name, - project_name=project_doc["name"] ) # Fill templates - template_obj = anatomy.templates_obj[workfile_template_key]["file"] + template_obj = anatomy.get_template_item( + "work", workfile_template_key, "file" + ) # Return filename return template_obj.format_strict(workdir_data) diff --git a/client/ayon_core/hosts/unreal/integration b/client/ayon_core/hosts/unreal/integration index 6d2793170e..04b35dbf5f 160000 --- a/client/ayon_core/hosts/unreal/integration +++ b/client/ayon_core/hosts/unreal/integration @@ -1 +1 @@ -Subproject commit 6d2793170ed57187842f683a943593973abcc337 +Subproject commit 04b35dbf5fc42d905281fc30d3a22b139c1855e5 diff --git a/client/ayon_core/hosts/unreal/lib.py b/client/ayon_core/hosts/unreal/lib.py index fe9e239ed5..37122b2096 100644 --- a/client/ayon_core/hosts/unreal/lib.py +++ b/client/ayon_core/hosts/unreal/lib.py @@ -216,10 +216,8 @@ def create_unreal_project(project_name: str, since 3.16.0 """ - env = env or os.environ preset = get_project_settings(project_name)["unreal"]["project_setup"] - ue_id = ".".join(ue_version.split(".")[:2]) # get unreal engine identifier # ------------------------------------------------------------------------- # FIXME (antirotor): As of 4.26 this is problem with UE4 built from @@ -238,10 +236,12 @@ def create_unreal_project(project_name: str, project_file = pr_dir / f"{unreal_project_name}.uproject" print("--- Generating a new project ...") - commandlet_cmd = [f'{ue_editor_exe.as_posix()}', - f'{cmdlet_project.as_posix()}', - f'-run=AyonGenerateProject', - f'{project_file.resolve().as_posix()}'] + commandlet_cmd = [ + ue_editor_exe.as_posix(), + cmdlet_project.as_posix(), + "-run=AyonGenerateProject", + project_file.resolve().as_posix() + ] if dev_mode or preset["dev_mode"]: commandlet_cmd.append('-GenerateCode') @@ -268,7 +268,7 @@ def create_unreal_project(project_name: str, pf.seek(0) json.dump(pf_json, pf, indent=4) pf.truncate() - print(f'--- Engine ID has been written into the project file') + print("--- Engine ID has been written into the project file") if dev_mode or preset["dev_mode"]: u_build_tool = get_path_to_ubt(engine_path, ue_version) @@ -282,17 +282,25 @@ def create_unreal_project(project_name: str, # we need to test this out arch = "Mac" - command1 = [u_build_tool.as_posix(), "-projectfiles", - f"-project={project_file}", "-progress"] + command1 = [ + u_build_tool.as_posix(), + "-projectfiles", + f"-project={project_file}", + "-progress" + ] subprocess.run(command1) - command2 = [u_build_tool.as_posix(), - f"-ModuleWithSuffix={unreal_project_name},3555", arch, - "Development", "-TargetType=Editor", - f'-Project={project_file}', - f'{project_file}', - "-IgnoreJunk"] + command2 = [ + u_build_tool.as_posix(), + f"-ModuleWithSuffix={unreal_project_name},3555", + arch, + "Development", + "-TargetType=Editor", + f"-Project={project_file}", + project_file, + "-IgnoreJunk" + ] subprocess.run(command2) diff --git a/client/ayon_core/hosts/unreal/plugins/create/create_render.py b/client/ayon_core/hosts/unreal/plugins/create/create_render.py index cbec84c543..5a96d9809c 100644 --- a/client/ayon_core/hosts/unreal/plugins/create/create_render.py +++ b/client/ayon_core/hosts/unreal/plugins/create/create_render.py @@ -50,7 +50,7 @@ class CreateRender(UnrealAssetCreator): # If the 
option to create a new level sequence is selected, # create a new level sequence and a master level. - root = f"/Game/Ayon/Sequences" + root = "/Game/Ayon/Sequences" # Create a new folder for the sequence in root sequence_dir_name = create_folder(root, product_name) @@ -166,7 +166,7 @@ class CreateRender(UnrealAssetCreator): master_lvl = levels[0].get_asset().get_path_name() except IndexError: raise RuntimeError( - f"Could not find the hierarchy for the selected sequence.") + "Could not find the hierarchy for the selected sequence.") # If the selected asset is the master sequence, we get its data # and then we create the instance for the master sequence. diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py index fe13e5989b..a12f4f41b4 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_alembic_animation.py @@ -14,9 +14,9 @@ import unreal # noqa class AnimationAlembicLoader(plugin.Loader): """Load Unreal SkeletalMesh from Alembic""" - families = ["animation"] + product_types = {"animation"} label = "Import Alembic Animation" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" @@ -70,22 +70,24 @@ class AnimationAlembicLoader(plugin.Loader): # Create directory for asset and ayon container root = unreal_pipeline.AYON_ASSET_DIR - asset = context.get('asset').get('name') + folder_name = context["folder"]["name"] + folder_path = context["folder"]["path"] + product_type = context["product"]["productType"] suffix = "_CON" - if asset: - asset_name = "{}_{}".format(asset, name) + if folder_name: + asset_name = "{}_{}".format(folder_name, name) else: asset_name = "{}".format(name) - version = context.get('version') + version = context["version"]["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/{asset}/{name_version}", suffix="") + f"{root}/{folder_name}/{name_version}", suffix="") container_name += suffix @@ -105,14 +107,17 @@ class AnimationAlembicLoader(plugin.Loader): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"] + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "product_type": product_type, + # TODO these should probably be removed + "asset": folder_path, + "family": product_type, } unreal_pipeline.imprint( f"{asset_dir}/{container_name}", data) @@ -126,12 +131,15 @@ class AnimationAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - name = container["asset_name"] - source_path = get_representation_path(representation) + def update(self, container, context): + folder_name = container["asset_name"] + repre_entity = context["representation"] + source_path =
get_representation_path(repre_entity) destination_path = container["namespace"] - task = self.get_task(source_path, destination_path, name, True) + task = self.get_task( + source_path, destination_path, folder_name, True + ) # do import fbx and replace existing data asset_tools = unreal.AssetToolsHelpers.get_asset_tools() @@ -143,8 +151,8 @@ class AnimationAlembicLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], }) asset_content = unreal.EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py index ee5b8d9244..f6a612ce53 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_animation.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_animation.py @@ -8,7 +8,7 @@ from unreal import EditorAssetLibrary from unreal import MovieSceneSkeletalAnimationTrack from unreal import MovieSceneSkeletalAnimationSection -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.pipeline import ( get_representation_path, AYON_CONTAINER_ID @@ -20,9 +20,9 @@ from ayon_core.hosts.unreal.api import pipeline as unreal_pipeline class AnimationFBXLoader(plugin.Loader): """Load Unreal SkeletalMesh from FBX.""" - families = ["animation"] + product_types = {"animation"} label = "Import FBX Animation" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" @@ -53,7 +53,7 @@ class AnimationFBXLoader(plugin.Loader): if not actor: return None - asset_doc = get_current_project_asset(fields=["data.fps"]) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) task.set_editor_property('filename', path) task.set_editor_property('destination_path', asset_dir) @@ -82,7 +82,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) + 'custom_sample_rate', folder_entity.get("attrib", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -140,14 +140,17 @@ class AnimationFBXLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and Ayon container - hierarchy = context.get('asset').get('data').get('parents') root = "/Game/Ayon" - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + hierarchy = folder_path.lstrip("/").split("/") + folder_name = hierarchy.pop(-1) + product_type = context["product"]["productType"] + suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/Animations/{asset}/{name}", suffix="") + f"{root}/Animations/{folder_name}/{name}", suffix="") ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -161,7 +164,7 @@ class AnimationFBXLoader(plugin.Loader): hierarchy_dir = root for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" - hierarchy_dir = f"{hierarchy_dir}/{asset}" 
+ hierarchy_dir = f"{hierarchy_dir}/{folder_name}" _filter = unreal.ARFilter( class_names=["World"], @@ -226,14 +229,17 @@ class AnimationFBXLoader(plugin.Loader): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"] + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "folder_path": folder_path, + "product_type": product_type, + # TODO these shold be probably removed + "asset": folder_path, + "family": product_type } unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data) @@ -246,10 +252,11 @@ class AnimationFBXLoader(plugin.Loader): unreal.EditorLevelLibrary.save_current_level() unreal.EditorLevelLibrary.load_level(master_level) - def update(self, container, representation): - name = container["asset_name"] - source_path = get_representation_path(representation) - asset_doc = get_current_project_asset(fields=["data.fps"]) + def update(self, container, context): + repre_entity = context["representation"] + folder_name = container["asset_name"] + source_path = get_representation_path(repre_entity) + folder_entity = get_current_folder_entity(fields=["attrib.fps"]) destination_path = container["namespace"] task = unreal.AssetImportTask() @@ -258,7 +265,7 @@ class AnimationFBXLoader(plugin.Loader): task.set_editor_property('filename', source_path) task.set_editor_property('destination_path', destination_path) # strip suffix - task.set_editor_property('destination_name', name) + task.set_editor_property('destination_name', folder_name) task.set_editor_property('replace_existing', True) task.set_editor_property('automated', True) task.set_editor_property('save', True) @@ -283,7 +290,7 @@ class AnimationFBXLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) + 'custom_sample_rate', folder_entity.get("attrib", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -305,8 +312,8 @@ class AnimationFBXLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], }) asset_content = EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py index 257cc2c720..681c83c6a1 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_camera.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_camera.py @@ -2,6 +2,8 @@ """Load camera from FBX.""" from pathlib import Path +import ayon_api + import unreal from unreal import ( EditorAssetLibrary, @@ -9,10 +11,10 @@ from unreal import ( EditorLevelUtils, LevelSequenceEditorBlueprintLibrary as LevelSequenceLib, ) -from ayon_core.client import get_asset_by_name from ayon_core.pipeline import ( AYON_CONTAINER_ID, get_current_project_name, + get_representation_path, ) from ayon_core.hosts.unreal.api import plugin from 
ayon_core.hosts.unreal.api.pipeline import ( @@ -26,9 +28,9 @@ from ayon_core.hosts.unreal.api.pipeline import ( class CameraLoader(plugin.Loader): """Load Unreal StaticMesh from FBX""" - families = ["camera"] + product_types = {"camera"} label = "Load Camera" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" @@ -83,24 +85,33 @@ class CameraLoader(plugin.Loader): """ # Create directory for asset and Ayon container - hierarchy = context.get('asset').get('data').get('parents') + folder_entity = context["folder"] + folder_attributes = folder_entity["attrib"] + folder_path = folder_entity["path"] + hierarchy_parts = folder_path.split("/") + # Remove empty string + hierarchy_parts.pop(0) + # Pop folder name + folder_name = hierarchy_parts.pop(-1) + root = "/Game/Ayon" hierarchy_dir = root hierarchy_dir_list = [] - for h in hierarchy: + for h in hierarchy_parts: hierarchy_dir = f"{hierarchy_dir}/{h}" hierarchy_dir_list.append(hierarchy_dir) - asset = context.get('asset').get('name') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" + asset_name = f"{folder_name}_{name}" if folder_name else name tools = unreal.AssetToolsHelpers().get_asset_tools() # Create a unique name for the camera directory unique_number = 1 - if EditorAssetLibrary.does_directory_exist(f"{hierarchy_dir}/{asset}"): + if EditorAssetLibrary.does_directory_exist( + f"{hierarchy_dir}/{folder_name}" + ): asset_content = EditorAssetLibrary.list_assets( - f"{root}/{asset}", recursive=False, include_folder=True + f"{root}/{folder_name}", recursive=False, include_folder=True ) # Get highest number to make a unique name @@ -113,7 +124,7 @@ class CameraLoader(plugin.Loader): unique_number = f_numbers[-1] + 1 if f_numbers else 1 asset_dir, container_name = tools.create_unique_asset_name( - f"{hierarchy_dir}/{asset}/{name}_{unique_number:02d}", suffix="") + f"{hierarchy_dir}/{folder_name}/{name}_{unique_number:02d}", suffix="") container_name += suffix @@ -122,14 +133,18 @@ class CameraLoader(plugin.Loader): # Create map for the shot, and create hierarchy of map. If the maps # already exist, we will use them. h_dir = hierarchy_dir_list[0] - h_asset = hierarchy[0] + h_asset = hierarchy_parts[0] master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" if not EditorAssetLibrary.does_asset_exist(master_level): EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map") - level = f"{asset_dir}/{asset}_map_camera.{asset}_map_camera" + level = ( + f"{asset_dir}/{folder_name}_map_camera.{folder_name}_map_camera" + ) if not EditorAssetLibrary.does_asset_exist(level): - EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map_camera") + EditorLevelLibrary.new_level( + f"{asset_dir}/{folder_name}_map_camera" + ) EditorLevelLibrary.load_level(master_level) EditorLevelUtils.add_level_to_world( @@ -144,7 +159,7 @@ class CameraLoader(plugin.Loader): # they don't exist.
frame_ranges = [] sequences = [] - for (h_dir, h) in zip(hierarchy_dir_list, hierarchy): + for (h_dir, h) in zip(hierarchy_dir_list, hierarchy_parts): root_content = EditorAssetLibrary.list_assets( h_dir, recursive=False, include_folder=False) @@ -170,7 +185,7 @@ class CameraLoader(plugin.Loader): EditorAssetLibrary.make_directory(asset_dir) cam_seq = tools.create_asset( - asset_name=f"{asset}_camera", + asset_name=f"{folder_name}_camera", package_path=asset_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() ) @@ -184,16 +199,17 @@ class CameraLoader(plugin.Loader): frame_ranges[i + 1][0], frame_ranges[i + 1][1], [level]) - project_name = get_current_project_name() - data = get_asset_by_name(project_name, asset)["data"] + clip_in = folder_attributes.get("clipIn") + clip_out = folder_attributes.get("clipOut") + cam_seq.set_display_rate( - unreal.FrameRate(data.get("fps"), 1.0)) - cam_seq.set_playback_start(data.get('clipIn')) - cam_seq.set_playback_end(data.get('clipOut') + 1) + unreal.FrameRate(folder_attributes.get("fps"), 1.0)) + cam_seq.set_playback_start(clip_in) + cam_seq.set_playback_end(clip_out + 1) set_sequence_hierarchy( sequences[-1], cam_seq, frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), + clip_in, clip_out, [level]) settings = unreal.MovieSceneUserImportFBXSettings() @@ -215,9 +231,7 @@ class CameraLoader(plugin.Loader): for possessable in cam_seq.get_possessables(): for tracks in possessable.get_tracks(): for section in tracks.get_sections(): - section.set_range( - data.get('clipIn'), - data.get('clipOut') + 1) + section.set_range(clip_in, clip_out + 1) for channel in section.get_all_channels(): for key in channel.get_keys(): old_time = key.get_time().get_editor_property( @@ -225,7 +239,7 @@ class CameraLoader(plugin.Loader): old_time_value = old_time.get_editor_property( 'value') new_time = old_time_value + ( - data.get('clipIn') - data.get('frameStart') + clip_in - folder_attributes.get('frameStart') ) key.set_time(unreal.FrameNumber(value=new_time)) @@ -233,17 +247,21 @@ class CameraLoader(plugin.Loader): create_container( container=container_name, path=asset_dir) + product_type = context["product"]["productType"] data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"] + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "product_type": product_type, + # TODO these should probably be removed + "asset": folder_name, + "family": product_type, } imprint(f"{asset_dir}/{container_name}", data) @@ -260,7 +278,7 @@ class CameraLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() curr_level_sequence = LevelSequenceLib.get_current_level_sequence() @@ -379,27 +397,33 @@ class CameraLoader(plugin.Loader): sub_scene.set_sequence(new_sequence) + repre_entity = context["representation"] + repre_path = get_representation_path(repre_entity) self._import_camera( EditorLevelLibrary.get_editor_world(), new_sequence, new_sequence.get_bindings(), settings, - str(representation["data"]["path"]) + repre_path ) # Set range of all
sections # Changing the range of the section is not enough. We need to change # the frame of all the keys in the section. project_name = get_current_project_name() - asset = container.get('asset') - data = get_asset_by_name(project_name, asset)["data"] + folder_path = container.get("folder_path") + if folder_path is None: + folder_path = container.get("asset") + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + folder_attributes = folder_entity["attrib"] + clip_in = folder_attributes["clipIn"] + clip_out = folder_attributes["clipOut"] + frame_start = folder_attributes["frameStart"] for possessable in new_sequence.get_possessables(): for tracks in possessable.get_tracks(): for section in tracks.get_sections(): - section.set_range( - data.get('clipIn'), - data.get('clipOut') + 1) + section.set_range(clip_in, clip_out + 1) for channel in section.get_all_channels(): for key in channel.get_keys(): old_time = key.get_time().get_editor_property( @@ -407,13 +431,13 @@ class CameraLoader(plugin.Loader): old_time_value = old_time.get_editor_property( 'value') new_time = old_time_value + ( - data.get('clipIn') - data.get('frameStart') + clip_in - frame_start ) key.set_time(unreal.FrameNumber(value=new_time)) data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], } imprint(f"{asset_dir}/{container.get('container_name')}", data) diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py index d7a4df1b03..ae7d41192a 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_geometrycache_abc.py @@ -19,9 +19,9 @@ import unreal # noqa class PointCacheAlembicLoader(plugin.Loader): """Load Point Cache from Alembic""" - families = ["model", "pointcache"] + product_types = {"model", "pointcache"} label = "Import Alembic Point Cache" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" @@ -84,22 +84,32 @@ class PointCacheAlembicLoader(plugin.Loader): create_container(container=container_name, path=asset_dir) def imprint( - self, asset, asset_dir, container_name, asset_name, representation, - frame_start, frame_end + self, + folder_path, + asset_dir, + container_name, + asset_name, + representation, + frame_start, + frame_end, + product_type, ): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": representation["_id"], - "parent": representation["parent"], - "family": representation["context"]["family"], + "representation": representation["id"], + "parent": representation["versionId"], "frame_start": frame_start, - "frame_end": frame_end + "frame_end": frame_end, + "product_type": product_type, + "folder_path": folder_path, + # TODO these should probably be removed + "family": product_type, + "asset": folder_path, } imprint(f"{asset_dir}/{container_name}", data) @@ -119,24 +129,28 @@ class PointCacheAlembicLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and Ayon container - asset = context.get('asset').get('name') + folder_entity = context["folder"] + folder_path = folder_entity["path"] + folder_name = folder_entity["name"] + folder_attributes =
folder_entity["attrib"] + suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" + version = context["version"]["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix - frame_start = context.get('asset').get('data').get('frameStart') - frame_end = context.get('asset').get('data').get('frameEnd') + frame_start = folder_attributes.get("frameStart") + frame_end = folder_attributes.get("frameEnd") # If frame start and end are the same, we increase the end frame by # one, otherwise Unreal will not import it @@ -151,8 +165,15 @@ class PointCacheAlembicLoader(plugin.Loader): frame_start, frame_end) self.imprint( - asset, asset_dir, container_name, asset_name, - context["representation"], frame_start, frame_end) + folder_path, + asset_dir, + container_name, + asset_name, + context["representation"], + frame_start, + frame_end, + context["product"]["productType"] + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -163,25 +184,28 @@ class PointCacheAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + # Create directory for folder and Ayon container + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + product_type = context["product"]["productType"] + version = context["version"]["version"] + repre_entity = context["representation"] - unreal.log_warning(context) - - if not context: - raise RuntimeError("No context found in representation") - - # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" + # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix @@ -189,15 +213,22 @@ class PointCacheAlembicLoader(plugin.Loader): frame_end = int(container.get("frame_end")) if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_entity) self.import_and_containerize( path, asset_dir, asset_name, container_name, frame_start, frame_end) self.imprint( - asset, asset_dir, container_name, asset_name, representation, - frame_start, frame_end) + folder_path, + asset_dir, + 
container_name, + asset_name, + repre_entity, + frame_start, + frame_end, + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py index 1a17268d1c..49d95c6459 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout.py @@ -15,8 +15,8 @@ from unreal import ( MovieSceneSubTrack, LevelSequenceEditorBlueprintLibrary as LevelSequenceLib, ) +import ayon_api -from ayon_core.client import get_asset_by_name, get_representations from ayon_core.pipeline import ( discover_loader_plugins, loaders_from_representation, @@ -25,7 +25,7 @@ from ayon_core.pipeline import ( AYON_CONTAINER_ID, get_current_project_name, ) -from ayon_core.pipeline.context_tools import get_current_project_asset +from ayon_core.pipeline.context_tools import get_current_folder_entity from ayon_core.settings import get_current_project_settings from ayon_core.hosts.unreal.api import plugin from ayon_core.hosts.unreal.api.pipeline import ( @@ -40,8 +40,8 @@ from ayon_core.hosts.unreal.api.pipeline import ( class LayoutLoader(plugin.Loader): """Load Layout from a JSON file""" - families = ["layout"] - representations = ["json"] + product_types = {"layout"} + representations = {"json"} label = "Load Layout" icon = "code-fork" @@ -169,7 +169,7 @@ class LayoutLoader(plugin.Loader): anim_path = f"{asset_dir}/animations/{anim_file_name}" - asset_doc = get_current_project_asset() + folder_entity = get_current_folder_entity() # Import animation task = unreal.AssetImportTask() task.options = unreal.FbxImportUI() @@ -204,7 +204,7 @@ class LayoutLoader(plugin.Loader): task.options.anim_sequence_import_data.set_editor_property( 'use_default_sample_rate', False) task.options.anim_sequence_import_data.set_editor_property( - 'custom_sample_rate', asset_doc.get("data", {}).get("fps")) + 'custom_sample_rate', folder_entity.get("attrib", {}).get("fps")) task.options.anim_sequence_import_data.set_editor_property( 'import_custom_attribute', True) task.options.anim_sequence_import_data.set_editor_property( @@ -290,7 +290,7 @@ class LayoutLoader(plugin.Loader): sec_params = section.get_editor_property('params') sec_params.set_editor_property('animation', animation) - def _get_repre_docs_by_version_id(self, data): + def _get_repre_entities_by_version_id(self, data): version_ids = { element.get("version") for element in data @@ -303,15 +303,15 @@ class LayoutLoader(plugin.Loader): return output project_name = get_current_project_name() - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, - representation_names=["fbx", "abc"], + representation_names={"fbx", "abc"}, version_ids=version_ids, - fields=["_id", "parent", "name"] + fields={"id", "versionId", "name"} ) - for repre_doc in repre_docs: - version_id = str(repre_doc["parent"]) - output[version_id].append(repre_doc) + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + output[version_id].append(repre_entity) return output def _process(self, lib_path, asset_dir, sequence, repr_loaded=None): @@ -333,47 +333,50 @@ class LayoutLoader(plugin.Loader): loaded_assets = [] - repre_docs_by_version_id = self._get_repre_docs_by_version_id(data) + repre_entities_by_version_id = self._get_repre_entities_by_version_id( + data + ) for element in data: - representation = None + 
repre_id = None repr_format = None if element.get('representation'): - repre_docs = repre_docs_by_version_id[element.get("version")] - if not repre_docs: + version_id = element.get("version") + repre_entities = repre_entities_by_version_id[version_id] + if not repre_entities: self.log.error( - f"No valid representation found for version " - f"{element.get('version')}") + f"No valid representation found for version" + f" {version_id}") continue - repre_doc = repre_docs[0] - representation = str(repre_doc["_id"]) - repr_format = repre_doc["name"] + repre_entity = repre_entities[0] + repre_id = repre_entity["id"] + repr_format = repre_entity["name"] # This is to keep compatibility with old versions of the # json format. elif element.get('reference_fbx'): - representation = element.get('reference_fbx') + repre_id = element.get('reference_fbx') repr_format = 'fbx' elif element.get('reference_abc'): - representation = element.get('reference_abc') + repre_id = element.get('reference_abc') repr_format = 'abc' # If reference is None, this element is skipped, as it cannot be # imported in Unreal - if not representation: + if not repre_id: continue instance_name = element.get('instance_name') skeleton = None - if representation not in repr_loaded: - repr_loaded.append(representation) + if repre_id not in repr_loaded: + repr_loaded.append(repre_id) product_type = element.get("product_type") if product_type is None: product_type = element.get("family") loaders = loaders_from_representation( - all_loaders, representation) + all_loaders, repre_id) loader = None @@ -384,7 +387,7 @@ class LayoutLoader(plugin.Loader): if not loader: self.log.error( - f"No valid loader found for {representation}") + f"No valid loader found for {repre_id}") continue options = { @@ -393,7 +396,7 @@ class LayoutLoader(plugin.Loader): assets = load_container( loader, - representation, + repre_id, namespace=instance_name, options=options ) @@ -413,8 +416,8 @@ class LayoutLoader(plugin.Loader): item for item in data if ((item.get('version') and item.get('version') == element.get('version')) or - item.get('reference_fbx') == representation or - item.get('reference_abc') == representation)] + item.get('reference_fbx') == repre_id or + item.get('reference_abc') == repre_id)] for instance in instances: # transform = instance.get('transform') @@ -438,9 +441,9 @@ class LayoutLoader(plugin.Loader): bindings_dict[inst] = bindings if skeleton: - skeleton_dict[representation] = skeleton + skeleton_dict[repre_id] = skeleton else: - skeleton = skeleton_dict.get(representation) + skeleton = skeleton_dict.get(repre_id) animation_file = element.get('animation') @@ -518,20 +521,25 @@ class LayoutLoader(plugin.Loader): create_sequences = data["unreal"]["level_sequences_for_layouts"] # Create directory for asset and Ayon container - hierarchy = context.get('asset').get('data').get('parents') + folder_entity = context["folder"] + folder_path = folder_entity["path"] + hierarchy = folder_path.lstrip("/").split("/") + # Remove folder name + folder_name = hierarchy.pop(-1) root = self.ASSET_ROOT hierarchy_dir = root hierarchy_dir_list = [] for h in hierarchy: hierarchy_dir = f"{hierarchy_dir}/{h}" hierarchy_dir_list.append(hierarchy_dir) - asset = context.get('asset').get('name') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else name + asset_name = f"{folder_name}_{name}" if folder_name else name tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - 
"{}/{}/{}".format(hierarchy_dir, asset, name), suffix="") + "{}/{}/{}".format(hierarchy_dir, folder_name, name), + suffix="" + ) container_name += suffix @@ -541,8 +549,8 @@ class LayoutLoader(plugin.Loader): shot = None sequences = [] - level = f"{asset_dir}/{asset}_map.{asset}_map" - EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map") + level = f"{asset_dir}/{folder_name}_map.{folder_name}_map" + EditorLevelLibrary.new_level(f"{asset_dir}/{folder_name}_map") if create_sequences: # Create map for the shot, and create hierarchy of map. If the @@ -591,7 +599,7 @@ class LayoutLoader(plugin.Loader): e.get_asset().get_playback_end())) shot = tools.create_asset( - asset_name=asset, + asset_name=folder_name, package_path=asset_dir, asset_class=unreal.LevelSequence, factory=unreal.LevelSequenceFactoryNew() @@ -606,16 +614,24 @@ class LayoutLoader(plugin.Loader): [level]) project_name = get_current_project_name() - data = get_asset_by_name(project_name, asset)["data"] + folder_attributes = ( + ayon_api.get_folder_by_path(project_name, folder_path)["attrib"] + ) shot.set_display_rate( - unreal.FrameRate(data.get("fps"), 1.0)) + unreal.FrameRate(folder_attributes.get("fps"), 1.0)) shot.set_playback_start(0) - shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1) + shot.set_playback_end( + folder_attributes.get('clipOut') + - folder_attributes.get('clipIn') + + 1 + ) if sequences: set_sequence_hierarchy( - sequences[-1], shot, + sequences[-1], + shot, frame_ranges[-1][1], - data.get('clipIn'), data.get('clipOut'), + folder_attributes.get('clipIn'), + folder_attributes.get('clipOut'), [level]) EditorLevelLibrary.load_level(level) @@ -635,14 +651,15 @@ class LayoutLoader(plugin.Loader): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "asset": folder_name, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"], + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "family": context["product"]["productType"], "loaded_assets": loaded_assets } imprint( @@ -661,7 +678,7 @@ class LayoutLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): data = get_current_project_settings() create_sequences = data["unreal"]["level_sequences_for_layouts"] @@ -677,16 +694,19 @@ class LayoutLoader(plugin.Loader): root = "/Game/Ayon" asset_dir = container.get('namespace') - context = representation.get("context") - hierarchy = context.get('hierarchy').split("/") + folder_entity = context["folder"] + repre_entity = context["representation"] + + hierarchy = folder_entity["path"].lstrip("/").split("/") + first_parent_name = hierarchy[0] sequence = None master_level = None if create_sequences: - h_dir = f"{root}/{hierarchy[0]}" - h_asset = hierarchy[0] + h_dir = f"{root}/{first_parent_name}" + h_asset = first_parent_name master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map" filter = unreal.ARFilter( @@ -728,21 +748,21 @@ class LayoutLoader(plugin.Loader): EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/") - source_path = get_representation_path(representation) + source_path = get_representation_path(repre_entity) loaded_assets = self._process(source_path, asset_dir, sequence) 
data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), - "loaded_assets": loaded_assets + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], + "loaded_assets": loaded_assets, } imprint( "{}/{}".format(asset_dir, container.get('container_name')), data) EditorLevelLibrary.save_current_level() - save_dir = f"{root}/{hierarchy[0]}" if create_sequences else asset_dir + save_dir = f"{root}/{first_parent_name}" if create_sequences else asset_dir asset_content = EditorAssetLibrary.list_assets( save_dir, recursive=True, include_folder=False) diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py index 94686e2a5b..f9d438367b 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_layout_existing.py @@ -3,15 +3,14 @@ from pathlib import Path import unreal from unreal import EditorLevelLibrary +import ayon_api -from ayon_core.client import get_representations from ayon_core.pipeline import ( discover_loader_plugins, loaders_from_representation, load_container, get_representation_path, AYON_CONTAINER_ID, - get_current_project_name, ) from ayon_core.hosts.unreal.api import plugin from ayon_core.hosts.unreal.api import pipeline as upipeline @@ -22,8 +21,8 @@ class ExistingLayoutLoader(plugin.Loader): Load Layout for an existing scene, and match the existing assets. """ - families = ["layout"] - representations = ["json"] + product_types = {"layout"} + representations = {"json"} label = "Load Layout on Existing Scene" icon = "code-fork" @@ -43,11 +42,15 @@ class ExistingLayoutLoader(plugin.Loader): @staticmethod def _create_container( - asset_name, asset_dir, asset, representation, parent, family + asset_name, + asset_dir, + folder_path, + representation, + version_id, + product_type ): container_name = f"{asset_name}_CON" - container = None if not unreal.EditorAssetLibrary.does_asset_exist( f"{asset_dir}/{container_name}" ): @@ -61,14 +64,17 @@ class ExistingLayoutLoader(plugin.Loader): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, # "loader": str(self.__class__.__name__), "representation": representation, - "parent": parent, - "family": family + "parent": version_id, + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } upipeline.imprint( @@ -195,19 +201,19 @@ class ExistingLayoutLoader(plugin.Loader): return assets - def _get_valid_repre_docs(self, project_name, version_ids): + def _get_valid_repre_entities(self, project_name, version_ids): valid_formats = ['fbx', 'abc'] - repre_docs = list(get_representations( + repre_entities = list(ayon_api.get_representations( project_name, representation_names=valid_formats, version_ids=version_ids )) - repre_doc_by_version_id = {} - for repre_doc in repre_docs: - version_id = str(repre_doc["parent"]) - repre_doc_by_version_id[version_id] = repre_doc - return repre_doc_by_version_id + repre_entities_by_version_id = {} + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + repre_entities_by_version_id[version_id] = repre_entity + return repre_entities_by_version_id def _process(self, lib_path, project_name): ar = unreal.AssetRegistryHelpers.get_asset_registry() @@ -226,39 +232,52 @@
class ExistingLayoutLoader(plugin.Loader): repre_ids.add(repre_id) elements.append(element) - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_ids=repre_ids ) - repre_docs_by_id = { - str(repre_doc["_id"]): repre_doc - for repre_doc in repre_docs + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in repre_entities } layout_data = [] version_ids = set() for element in elements: repre_id = element.get("representation") - repre_doc = repre_docs_by_id.get(repre_id) - if not repre_doc: + repre_entity = repre_entities_by_id.get(repre_id) + if not repre_entity: raise AssertionError("Representation not found") - if not (repre_doc.get('data') or repre_doc['data'].get('path')): + if not ( + repre_entity.get("attrib") + and repre_entity["attrib"].get("path") + ): raise AssertionError("Representation does not have path") - if not repre_doc.get('context'): + if not repre_entity.get('context'): raise AssertionError("Representation does not have context") - layout_data.append((repre_doc, element)) - version_ids.add(repre_doc["parent"]) + layout_data.append((repre_entity, element)) + version_ids.add(repre_entity["versionId"]) + + repre_parents_by_id = ayon_api.get_representation_parents( + project_name, repre_entities_by_id.keys() + ) # Prequery valid repre documents for all elements at once - valid_repre_doc_by_version_id = self._get_valid_repre_docs( + valid_repre_entities_by_version_id = self._get_valid_repre_entities( project_name, version_ids) containers = [] actors_matched = [] - for (repr_data, lasset) in layout_data: + for (repre_entity, lasset) in layout_data: # For every actor in the scene, check if it has a representation in # those we got from the JSON. If so, create a container for it. # Otherwise, remove it from the scene. found = False + repre_id = repre_entity["id"] + repre_parents = repre_parents_by_id[repre_id] + folder_path = repre_parents.folder["path"] + folder_name = repre_parents.folder["name"] + product_name = repre_parents.product["name"] + product_type = repre_parents.product["productType"] for actor in actors: if not actor.get_class().get_name() == 'StaticMeshActor': @@ -275,7 +294,7 @@ class ExistingLayoutLoader(plugin.Loader): path = Path(filename) if (not path.name or - path.name not in repr_data.get('data').get('path')): + path.name not in repre_entity["attrib"]["path"]): continue actor.set_actor_label(lasset.get('instance_name')) @@ -283,12 +302,13 @@ class ExistingLayoutLoader(plugin.Loader): mesh_path = Path(mesh.get_path_name()).parent.as_posix() # Create the container for the asset.
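
Reviewer note: `ayon_api.get_representation_parents` is what replaces the per-representation `context` lookups in the matching loop of this hunk; a single query resolves the version, product and folder entities for every representation id at once. A sketch with placeholder values:

```python
import ayon_api

project_name = "my_project"  # placeholder
repre_ids = {"1234abcd"}     # placeholder representation ids

# Maps each representation id to a parents object whose attributes
# expose entity dicts, as accessed in the loop above.
repre_parents_by_id = ayon_api.get_representation_parents(
    project_name, repre_ids
)
for repre_id, parents in repre_parents_by_id.items():
    folder_path = parents.folder["path"]
    product_type = parents.product["productType"]
```
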
- asset = repr_data.get('context').get('asset') - product_name = repr_data.get('context').get('subset') container = self._create_container( - f"{asset}_{product_name}", mesh_path, asset, - repr_data.get('_id'), repr_data.get('parent'), - repr_data.get('context').get('family') + f"{folder_name}_{product_name}", + mesh_path, + folder_path, + repre_entity["id"], + repre_entity["versionId"], + product_type ) containers.append(container) @@ -316,18 +336,18 @@ class ExistingLayoutLoader(plugin.Loader): loaded = False for container in all_containers: - repr = container.get('representation') + repre_id = container.get('representation') - if not repr == str(repr_data.get('_id')): + if not repre_id == repre_entity["id"]: continue asset_dir = container.get('namespace') - filter = unreal.ARFilter( + arfilter = unreal.ARFilter( class_names=["StaticMesh"], package_paths=[asset_dir], recursive_paths=False) - assets = ar.get_assets(filter) + assets = ar.get_assets(arfilter) for asset in assets: obj = asset.get_asset() @@ -340,8 +360,9 @@ class ExistingLayoutLoader(plugin.Loader): if loaded: continue + version_id = lasset.get('version') assets = self._load_asset( - valid_repre_doc_by_version_id.get(lasset.get('version')), + valid_repre_entities_by_version_id.get(version_id), lasset.get('representation'), lasset.get('instance_name'), lasset.get('family') @@ -370,9 +391,11 @@ class ExistingLayoutLoader(plugin.Loader): def load(self, context, name, namespace, options): print("Loading Layout and Match Assets") - asset = context.get('asset').get('name') - asset_name = f"{asset}_{name}" if asset else name - container_name = f"{asset}_{name}_CON" + folder_name = context["folder"]["name"] + folder_path = context["folder"]["path"] + product_type = context["product"]["productType"] + asset_name = f"{folder_name}_{name}" if folder_name else name + container_name = f"{folder_name}_{name}_CON" curr_level = self._get_current_level() @@ -395,29 +418,34 @@ class ExistingLayoutLoader(plugin.Loader): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": curr_level_path, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"], - "loaded_assets": containers + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "product_type": product_type, + "loaded_assets": containers, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } upipeline.imprint(f"{curr_level_path}/{container_name}", data) - def update(self, container, representation): + def update(self, container, context): asset_dir = container.get('namespace') - source_path = get_representation_path(representation) - project_name = get_current_project_name() + project_name = context["project"]["name"] + repre_entity = context["representation"] + + source_path = get_representation_path(repre_entity) containers = self._process(source_path, project_name) data = { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), - "loaded_assets": containers + "representation": repre_entity["id"], + "loaded_assets": containers, + "parent": repre_entity["versionId"], } upipeline.imprint( "{}/{}".format(asset_dir, container.get('container_name')), data) diff --git 
a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py index 3ab6ea8ebd..dfc5d58708 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_abc.py @@ -18,9 +18,9 @@ import unreal # noqa class SkeletalMeshAlembicLoader(plugin.Loader): """Load Unreal SkeletalMesh from Alembic""" - families = ["pointcache", "skeletalMesh"] + product_types = {"pointcache", "skeletalMesh"} label = "Import Alembic Skeletal Mesh" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" @@ -73,19 +73,28 @@ class SkeletalMeshAlembicLoader(plugin.Loader): create_container(container=container_name, path=asset_dir) def imprint( - self, asset, asset_dir, container_name, asset_name, representation + self, + folder_path, + asset_dir, + container_name, + asset_name, + representation, + product_type ): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": representation["_id"], - "parent": representation["parent"], - "family": representation["context"]["family"] + "representation": representation["id"], + "parent": representation["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } imprint(f"{asset_dir}/{container_name}", data) @@ -105,15 +114,16 @@ class SkeletalMeshAlembicLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and ayon container - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" + version = context["version"]["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" default_conversion = False if options.get("default_conversion"): @@ -121,7 +131,7 @@ class SkeletalMeshAlembicLoader(plugin.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix @@ -131,9 +141,15 @@ class SkeletalMeshAlembicLoader(plugin.Loader): self.import_and_containerize(path, asset_dir, asset_name, container_name, default_conversion) + product_type = context["product"]["productType"] self.imprint( - asset, asset_dir, container_name, asset_name, - context["representation"]) + folder_path, + asset_dir, + container_name, + asset_name, + context["representation"], + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -144,34 +160,44 @@ class SkeletalMeshAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) + def update(self, container, context): + folder_path = 
context["folder"]["path"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + product_type = context["product"]["productType"] + version = context["version"]["version"] + repre_entity = context["representation"] - if not context: - raise RuntimeError("No context found in representation") - - # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') + # Create directory for folder and Ayon container suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_entity) self.import_and_containerize(path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_path, + asset_dir, + container_name, + asset_name, + repre_entity, + product_type, + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py index cbdb4901f8..513404ab98 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py @@ -18,9 +18,9 @@ import unreal # noqa class SkeletalMeshFBXLoader(plugin.Loader): """Load Unreal SkeletalMesh from FBX.""" - families = ["rig", "skeletalMesh"] + product_types = {"rig", "skeletalMesh"} label = "Import FBX Skeletal Mesh" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" @@ -78,19 +78,28 @@ class SkeletalMeshFBXLoader(plugin.Loader): create_container(container=container_name, path=asset_dir) def imprint( - self, asset, asset_dir, container_name, asset_name, representation + self, + folder_path, + asset_dir, + container_name, + asset_name, + representation, + product_type ): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": representation["_id"], - "parent": representation["parent"], - "family": representation["context"]["family"] + "representation": representation["id"], + "parent": representation["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } imprint(f"{asset_dir}/{container_name}", data) @@ -110,19 +119,21 @@ class SkeletalMeshFBXLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and Ayon container - asset = context.get('asset').get('name') + folder_name = context["folder"]["name"] + product_type = 
context["product"]["productType"] suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" + version_entity = context["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + version = version_entity["version"] + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="" + f"{self.root}/{folder_name}/{name_version}", suffix="" ) container_name += suffix @@ -134,8 +145,13 @@ class SkeletalMeshFBXLoader(plugin.Loader): path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, - context["representation"]) + folder_name, + asset_dir, + container_name, + asset_name, + context["representation"], + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -146,34 +162,44 @@ class SkeletalMeshFBXLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) - - if not context: - raise RuntimeError("No context found in representation") + def update(self, container, context): + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + product_type = context["product"]["productType"] + version = context["version"]["version"] + repre_entity = context["representation"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_entity) self.import_and_containerize( path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_path, + asset_dir, + container_name, + asset_name, + repre_entity, + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py index c60ad8814c..0bf6ce9eaa 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_abc.py @@ -18,9 +18,9 @@ import unreal # noqa class StaticMeshAlembicLoader(plugin.Loader): """Load Unreal StaticMesh from 
Alembic""" - families = ["model", "staticMesh"] + product_types = {"model", "staticMesh"} label = "Import Alembic Static Mesh" - representations = ["abc"] + representations = {"abc"} icon = "cube" color = "orange" @@ -74,19 +74,28 @@ class StaticMeshAlembicLoader(plugin.Loader): create_container(container=container_name, path=asset_dir) def imprint( - self, asset, asset_dir, container_name, asset_name, representation + self, + folder_path, + asset_dir, + container_name, + asset_name, + representation, + product_type, ): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, + "folder_path": folder_path, "namespace": asset_dir, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": representation["_id"], - "parent": representation["parent"], - "family": representation["context"]["family"] + "representation": representation["id"], + "parent": representation["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type } imprint(f"{asset_dir}/{container_name}", data) @@ -106,15 +115,17 @@ class StaticMeshAlembicLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and Ayon container - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + folder_name = context["folder"]["path"] + suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" + version = context["version"]["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" default_conversion = False if options.get("default_conversion"): @@ -122,7 +133,7 @@ class StaticMeshAlembicLoader(plugin.Loader): tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix @@ -132,9 +143,15 @@ class StaticMeshAlembicLoader(plugin.Loader): self.import_and_containerize(path, asset_dir, asset_name, container_name, default_conversion) + product_type = context["product"]["productType"] self.imprint( - asset, asset_dir, container_name, asset_name, - context["representation"]) + folder_path, + asset_dir, + container_name, + asset_name, + context["representation"], + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False @@ -145,34 +162,45 @@ class StaticMeshAlembicLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) - - if not context: - raise RuntimeError("No context found in representation") + def update(self, container, context): + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + product_type = context["product"]["productType"] + repre_entity = context["representation"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = 
context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" + version = context["version"]["version"] # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" + tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_entity) self.import_and_containerize(path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_path, + asset_dir, + container_name, + asset_name, + repre_entity, + product_type + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py index c9271159c4..b7bb57ac23 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_staticmesh_fbx.py @@ -18,9 +18,9 @@ import unreal # noqa class StaticMeshFBXLoader(plugin.Loader): """Load Unreal StaticMesh from FBX.""" - families = ["model", "staticMesh"] + product_types = {"model", "staticMesh"} label = "Import FBX Static Mesh" - representations = ["fbx"] + representations = {"fbx"} icon = "cube" color = "orange" @@ -66,19 +66,28 @@ class StaticMeshFBXLoader(plugin.Loader): create_container(container=container_name, path=asset_dir) def imprint( - self, asset, asset_dir, container_name, asset_name, representation + self, + folder_path, + asset_dir, + container_name, + asset_name, + repre_entity, + product_type ): data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, "namespace": asset_dir, + "folder_path": folder_path, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": representation["_id"], - "parent": representation["parent"], - "family": representation["context"]["family"] + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } imprint(f"{asset_dir}/{container_name}", data) @@ -98,19 +107,20 @@ class StaticMeshFBXLoader(plugin.Loader): list(str): list of container content """ # Create directory for asset and Ayon container - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" + version = context["version"]["version"] # Check if version is hero version and use different name - if not version.get("name") and version.get('type') == "hero_version": + if version < 0: name_version = f"{name}_hero" else: - name_version = f"{name}_v{version.get('name'):03d}" + name_version = f"{name}_v{version:03d}" tools = 
unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="" + f"{self.root}/{folder_name}/{name_version}", suffix="" ) container_name += suffix @@ -122,8 +132,13 @@ class StaticMeshFBXLoader(plugin.Loader): path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, - context["representation"]) + folder_path, + asset_dir, + container_name, + asset_name, + context["representation"], + context["product"]["productType"] + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=True @@ -134,34 +149,44 @@ class StaticMeshFBXLoader(plugin.Loader): return asset_content - def update(self, container, representation): - context = representation.get("context", {}) - - if not context: - raise RuntimeError("No context found in representation") + def update(self, container, context): + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] + product_name = context["product"]["name"] + product_type = context["product"]["productType"] + version = context["version"]["version"] + repre_entity = context["representation"] # Create directory for asset and Ayon container - asset = context.get('asset') - name = context.get('subset') suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" - version = context.get('version') + asset_name = product_name + if folder_name: + asset_name = f"{folder_name}_{product_name}" # Check if version is hero version and use different name - name_version = f"{name}_v{version:03d}" if version else f"{name}_hero" + if version < 0: + name_version = f"{product_name}_hero" + else: + name_version = f"{product_name}_v{version:03d}" tools = unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{self.root}/{asset}/{name_version}", suffix="") + f"{self.root}/{folder_name}/{name_version}", suffix="") container_name += suffix if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): - path = get_representation_path(representation) + path = get_representation_path(repre_entity) self.import_and_containerize( path, asset_dir, asset_name, container_name) self.imprint( - asset, asset_dir, container_name, asset_name, representation) + folder_path, + asset_dir, + container_name, + asset_name, + repre_entity, + product_type, + ) asset_content = unreal.EditorAssetLibrary.list_assets( asset_dir, recursive=True, include_folder=False diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py index 0898035985..63f23ecc11 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_uasset.py @@ -15,9 +15,9 @@ import unreal # noqa class UAssetLoader(plugin.Loader): """Load UAsset.""" - families = ["uasset"] + product_types = {"uasset"} label = "Load UAsset" - representations = ["uasset"] + representations = {"uasset"} icon = "cube" color = "orange" @@ -42,12 +42,13 @@ class UAssetLoader(plugin.Loader): # Create directory for asset and Ayon container root = unreal_pipeline.AYON_ASSET_DIR - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" tools = 
unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/{asset}/{name}", suffix="" + f"{root}/{folder_name}/{name}", suffix="" ) unique_number = 1 @@ -73,17 +74,21 @@ class UAssetLoader(plugin.Loader): unreal_pipeline.create_container( container=container_name, path=asset_dir) + product_type = context["product"]["productType"] data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, "namespace": asset_dir, + "folder_path": folder_path, "container_name": container_name, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"], + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data) @@ -96,11 +101,13 @@ class UAssetLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() asset_dir = container["namespace"] - name = representation["context"]["subset"] + + product_name = context["product"]["name"] + repre_entity = context["representation"] unique_number = container["container_name"].split("_")[-2] @@ -116,19 +123,20 @@ class UAssetLoader(plugin.Loader): if obj.get_class().get_name() != "AyonAssetContainer": unreal.EditorAssetLibrary.delete_asset(asset) - update_filepath = get_representation_path(representation) + update_filepath = get_representation_path(repre_entity) shutil.copy( update_filepath, - f"{destination_path}/{name}_{unique_number}.{self.extension}") + f"{destination_path}/{product_name}_{unique_number}.{self.extension}" + ) container_path = f'{container["namespace"]}/{container["objectName"]}' # update metadata unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]), + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], } ) @@ -156,8 +164,8 @@ class UAssetLoader(plugin.Loader): class UMapLoader(UAssetLoader): """Load Level.""" - families = ["uasset"] + product_types = {"uasset"} label = "Load Level" - representations = ["umap"] + representations = {"umap"} extension = "umap" diff --git a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py index d21c6205fc..708fc83745 100644 --- a/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py +++ b/client/ayon_core/hosts/unreal/plugins/load/load_yeticache.py @@ -15,9 +15,9 @@ import unreal # noqa class YetiLoader(plugin.Loader): """Load Yeti Cache""" - families = ["yeticacheUE"] + product_types = {"yeticacheUE"} label = "Import Yeti" - representations = ["abc"] + representations = {"abc"} icon = "pagelines" color = "orange" @@ -87,13 +87,14 @@ class YetiLoader(plugin.Loader): # Create directory for asset and Ayon container root = unreal_pipeline.AYON_ASSET_DIR - asset = context.get('asset').get('name') + folder_path = context["folder"]["path"] + folder_name = context["folder"]["name"] suffix = "_CON" - asset_name = f"{asset}_{name}" if asset else f"{name}" + asset_name = f"{folder_name}_{name}" if folder_name else f"{name}" tools = 
unreal.AssetToolsHelpers().get_asset_tools() asset_dir, container_name = tools.create_unique_asset_name( - f"{root}/{asset}/{name}", suffix="") + f"{root}/{folder_name}/{name}", suffix="") unique_number = 1 while unreal.EditorAssetLibrary.does_directory_exist( @@ -116,17 +117,21 @@ class YetiLoader(plugin.Loader): unreal_pipeline.create_container( container=container_name, path=asset_dir) + product_type = context["product"]["productType"] data = { "schema": "ayon:container-2.0", "id": AYON_CONTAINER_ID, - "asset": asset, "namespace": asset_dir, "container_name": container_name, + "folder_path": folder_path, "asset_name": asset_name, "loader": str(self.__class__.__name__), - "representation": context["representation"]["_id"], - "parent": context["representation"]["parent"], - "family": context["representation"]["context"]["family"] + "representation": context["representation"]["id"], + "parent": context["representation"]["versionId"], + "product_type": product_type, + # TODO these should be probably removed + "asset": folder_path, + "family": product_type, } unreal_pipeline.imprint(f"{asset_dir}/{container_name}", data) @@ -139,9 +144,10 @@ class YetiLoader(plugin.Loader): return asset_content - def update(self, container, representation): + def update(self, container, context): + repre_entity = context["representation"] name = container["asset_name"] - source_path = get_representation_path(representation) + source_path = get_representation_path(repre_entity) destination_path = container["namespace"] task = self.get_task(source_path, destination_path, name, True) @@ -154,8 +160,8 @@ class YetiLoader(plugin.Loader): unreal_pipeline.imprint( container_path, { - "representation": str(representation["_id"]), - "parent": str(representation["parent"]) + "representation": repre_entity["id"], + "parent": repre_entity["versionId"], }) asset_content = unreal.EditorAssetLibrary.list_assets( diff --git a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py index ea53f221ea..ce2a03155b 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/collect_render_instances.py @@ -1,12 +1,11 @@ -import os from pathlib import Path import unreal +import pyblish.api from ayon_core.pipeline import get_current_project_name from ayon_core.pipeline import Anatomy from ayon_core.hosts.unreal.api import pipeline -import pyblish.api class CollectRenderInstances(pyblish.api.InstancePlugin): diff --git a/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py b/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py index de8cf0be2a..5489057021 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/extract_layout.py @@ -6,8 +6,8 @@ import math import unreal from unreal import EditorLevelLibrary as ell from unreal import EditorAssetLibrary as eal +import ayon_api -from ayon_core.client import get_representation_by_name from ayon_core.pipeline import publish @@ -60,10 +60,10 @@ class ExtractLayout(publish.Extractor): family = eal.get_metadata_tag(asset_container, "family") self.log.info("Parent: {}".format(parent_id)) - blend = ayon_api.get_representation_by_name( wait
json_element = {} json_element["reference"] = str(blend_id) diff --git a/client/ayon_core/hosts/unreal/plugins/publish/validate_sequence_frames.py b/client/ayon_core/hosts/unreal/plugins/publish/validate_sequence_frames.py index 205436ad37..85214a2b0d 100644 --- a/client/ayon_core/hosts/unreal/plugins/publish/validate_sequence_frames.py +++ b/client/ayon_core/hosts/unreal/plugins/publish/validate_sequence_frames.py @@ -22,8 +22,12 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): def process(self, instance): representations = instance.data.get("representations") + folder_attributes = ( + instance.data + .get("folderEntity", {}) + .get("attrib", {}) + ) for repr in representations: - data = instance.data.get("assetEntity", {}).get("data", {}) repr_files = repr["files"] if isinstance(repr_files, str): continue @@ -64,8 +68,8 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin): frames = frames[1:] current_range = (frames[0], frames[-1]) - required_range = (data["clipIn"], - data["clipOut"]) + required_range = (folder_attributes["clipIn"], + folder_attributes["clipOut"]) if current_range != required_range: raise PublishValidationError( diff --git a/client/ayon_core/lib/__init__.py b/client/ayon_core/lib/__init__.py index ab6a604adc..408262ca42 100644 --- a/client/ayon_core/lib/__init__.py +++ b/client/ayon_core/lib/__init__.py @@ -15,7 +15,18 @@ python_version_dir = os.path.join( sys.path.insert(0, python_version_dir) site.addsitedir(python_version_dir) - +from .local_settings import ( + IniSettingRegistry, + JSONSettingRegistry, + AYONSecureRegistry, + AYONSettingsRegistry, + OpenPypeSecureRegistry, + OpenPypeSettingsRegistry, + get_local_site_id, + get_ayon_username, + get_openpype_username, +) +from .ayon_connection import initialize_ayon_connection from .events import ( emit_event, register_event_callback @@ -70,11 +81,8 @@ from .log import ( ) from .path_templates import ( - merge_dict, - TemplateMissingKey, TemplateUnsolved, StringTemplate, - TemplatesDict, FormatObject, ) @@ -112,34 +120,6 @@ from .transcoding import ( get_rescaled_command_arguments, ) -from .local_settings import ( - IniSettingRegistry, - JSONSettingRegistry, - AYONSecureRegistry, - AYONSettingsRegistry, - OpenPypeSecureRegistry, - OpenPypeSettingsRegistry, - get_local_site_id, - get_ayon_username, - get_openpype_username, -) - -from .applications import ( - ApplicationLaunchFailed, - ApplictionExecutableNotFound, - ApplicationNotFound, - ApplicationManager, - - PreLaunchHook, - PostLaunchHook, - - EnvironmentPrepData, - prepare_app_environments, - prepare_context_environments, - get_app_environments_for_context, - apply_project_environments_value -) - from .plugin_tools import ( prepare_template_data, source_hash, @@ -156,20 +136,27 @@ from .path_tools import ( from .ayon_info import ( is_running_from_build, + is_using_ayon_console, is_staging_enabled, is_dev_mode_enabled, is_in_tests, ) - -from .connections import ( - requests_get, - requests_post -) - terminal = Terminal __all__ = [ + "IniSettingRegistry", + "JSONSettingRegistry", + "AYONSecureRegistry", + "AYONSettingsRegistry", + "OpenPypeSecureRegistry", + "OpenPypeSettingsRegistry", + "get_local_site_id", + "get_ayon_username", + "get_openpype_username", + + "initialize_ayon_connection", + "emit_event", "register_event_callback", @@ -228,28 +215,6 @@ __all__ = [ "convert_ffprobe_fps_to_float", "get_rescaled_command_arguments", - "IniSettingRegistry", - "JSONSettingRegistry", - "AYONSecureRegistry", - "AYONSettingsRegistry", - 
"OpenPypeSecureRegistry", - "OpenPypeSettingsRegistry", - "get_local_site_id", - "get_ayon_username", - "get_openpype_username", - - "ApplicationLaunchFailed", - "ApplictionExecutableNotFound", - "ApplicationNotFound", - "ApplicationManager", - "PreLaunchHook", - "PostLaunchHook", - "EnvironmentPrepData", - "prepare_app_environments", - "prepare_context_environments", - "get_app_environments_for_context", - "apply_project_environments_value", - "compile_list_of_regexes", "filter_profiles", @@ -264,11 +229,8 @@ __all__ = [ "get_version_from_path", "get_last_version_from_path", - "merge_dict", - "TemplateMissingKey", "TemplateUnsolved", "StringTemplate", - "TemplatesDict", "FormatObject", "terminal", @@ -280,10 +242,8 @@ __all__ = [ "Logger", "is_running_from_build", + "is_using_ayon_console", "is_staging_enabled", "is_dev_mode_enabled", "is_in_tests", - - "requests_get", - "requests_post" ] diff --git a/client/ayon_core/lib/applications.py b/client/ayon_core/lib/applications.py deleted file mode 100644 index 8f1a1d10ea..0000000000 --- a/client/ayon_core/lib/applications.py +++ /dev/null @@ -1,2023 +0,0 @@ -import os -import sys -import copy -import json -import tempfile -import platform -import collections -import inspect -import subprocess -from abc import ABCMeta, abstractmethod - -import six - -from ayon_core import AYON_CORE_ROOT -from ayon_core.client import get_asset_name_identifier -from ayon_core.settings import get_project_settings, get_studio_settings -from .log import Logger -from .profiles_filtering import filter_profiles -from .local_settings import get_ayon_username - -from .python_module_tools import ( - modules_from_path, - classes_from_module -) -from .execute import ( - find_executable, - get_linux_launcher_args -) - -_logger = None - -PLATFORM_NAMES = {"windows", "linux", "darwin"} -DEFAULT_ENV_SUBGROUP = "standard" -CUSTOM_LAUNCH_APP_GROUPS = { - "djvview" -} - - -class LaunchTypes: - """Launch types are filters for pre/post-launch hooks. - - Please use these variables in case they'll change values. - """ - - # Local launch - application is launched on local machine - local = "local" - # Farm render job - application is on farm - farm_render = "farm-render" - # Farm publish job - integration post-render job - farm_publish = "farm-publish" - # Remote launch - application is launched on remote machine from which - # can be started publishing - remote = "remote" - # Automated launch - application is launched with automated publishing - automated = "automated" - - -def parse_environments(env_data, env_group=None, platform_name=None): - """Parse environment values from settings byt group and platform. - - Data may contain up to 2 hierarchical levels of dictionaries. At the end - of the last level must be string or list. List is joined using platform - specific joiner (';' for windows and ':' for linux and mac). - - Hierarchical levels can contain keys for subgroups and platform name. - Platform specific values must be always last level of dictionary. Platform - names are "windows" (MS Windows), "linux" (any linux distribution) and - "darwin" (any MacOS distribution). - - Subgroups are helpers added mainly for standard and on farm usage. Farm - may require different environments for e.g. licence related values or - plugins. Default subgroup is "standard". 
- - Examples: - ``` - { - # Unchanged value - "ENV_KEY1": "value", - # Empty values are kept (unset environment variable) - "ENV_KEY2": "", - - # Join list values with ':' or ';' - "ENV_KEY3": ["value1", "value2"], - - # Environment groups - "ENV_KEY4": { - "standard": "DEMO_SERVER_URL", - "farm": "LICENCE_SERVER_URL" - }, - - # Platform specific (and only for windows and mac) - "ENV_KEY5": { - "windows": "windows value", - "darwin": ["value 1", "value 2"] - }, - - # Environment groups and platform combination - "ENV_KEY6": { - "farm": "FARM_VALUE", - "standard": { - "windows": ["value1", "value2"], - "linux": "value1", - "darwin": "" - } - } - } - ``` - """ - output = {} - if not env_data: - return output - - if not env_group: - env_group = DEFAULT_ENV_SUBGROUP - - if not platform_name: - platform_name = platform.system().lower() - - for key, value in env_data.items(): - if isinstance(value, dict): - # Look if any key is platform key - # - expect that represents environment group if does not contain - # platform keys - if not PLATFORM_NAMES.intersection(set(value.keys())): - # Skip the key if group is not available - if env_group not in value: - continue - value = value[env_group] - - # Check again if value is dictionary - # - this time there should be only platform keys - if isinstance(value, dict): - value = value.get(platform_name) - - # Check if value is list and join it's values - # QUESTION Should empty values be skipped? - if isinstance(value, (list, tuple)): - value = os.pathsep.join(value) - - # Set key to output if value is string - if isinstance(value, six.string_types): - output[key] = value - return output - - -def get_logger(): - """Global lib.applications logger getter.""" - global _logger - if _logger is None: - _logger = Logger.get_logger(__name__) - return _logger - - -class ApplicationNotFound(Exception): - """Application was not found in ApplicationManager by name.""" - - def __init__(self, app_name): - self.app_name = app_name - super(ApplicationNotFound, self).__init__( - "Application \"{}\" was not found.".format(app_name) - ) - - -class ApplictionExecutableNotFound(Exception): - """Defined executable paths are not available on the machine.""" - - def __init__(self, application): - self.application = application - details = None - if not application.executables: - msg = ( - "Executable paths for application \"{}\"({}) are not set." - ) - else: - msg = ( - "Defined executable paths for application \"{}\"({})" - " are not available on this machine." - ) - details = "Defined paths:" - for executable in application.executables: - details += "\n- " + executable.executable_path - - self.msg = msg.format(application.full_label, application.full_name) - self.details = details - - exc_mgs = str(self.msg) - if details: - # Is good idea to pass new line symbol to exception message? - exc_mgs += "\n" + details - self.exc_msg = exc_mgs - super(ApplictionExecutableNotFound, self).__init__(exc_mgs) - - -class ApplicationLaunchFailed(Exception): - """Application launch failed due to known reason. - - Message should be self explanatory as traceback won't be shown. - """ - pass - - -class ApplicationGroup: - """Hold information about application group. - - Application group wraps different versions(variants) of application. - e.g. "maya" is group and "maya_2020" is variant. - - Group hold `host_name` which is implementation name used in pype. Also - holds `enabled` if whole app group is enabled or `icon` for application - icon path in resources. 
- - Group has also `environment` which hold same environments for all variants. - - Args: - name (str): Groups' name. - data (dict): Group defying data loaded from settings. - manager (ApplicationManager): Manager that created the group. - """ - - def __init__(self, name, data, manager): - self.name = name - self.manager = manager - self._data = data - - self.enabled = data["enabled"] - self.label = data["label"] or None - self.icon = data["icon"] or None - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - self._environment = env - - host_name = data["host_name"] or None - self.is_host = host_name is not None - self.host_name = host_name - - settings_variants = data["variants"] - variants = {} - for variant_data in settings_variants: - app_variant = Application(variant_data, self) - variants[app_variant.name] = app_variant - - self.variants = variants - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class Application: - """Hold information about application. - - Object by itself does nothing special. - - Args: - data (dict): Data for the version containing information about - executables, variant label or if is enabled. - Only required key is `executables`. - group (ApplicationGroup): App group object that created the application - and under which application belongs. - - """ - def __init__(self, data, group): - self._data = data - name = data["name"] - label = data["label"] or name - enabled = False - if group.enabled: - enabled = data.get("enabled", True) - - if group.label: - full_label = " ".join((group.label, label)) - else: - full_label = label - env = {} - try: - env = json.loads(data["environment"]) - except Exception: - pass - - arguments = data["arguments"] - if isinstance(arguments, dict): - arguments = arguments.get(platform.system().lower()) - - if not arguments: - arguments = [] - - _executables = data["executables"].get(platform.system().lower(), []) - executables = [ - ApplicationExecutable(executable) - for executable in _executables - ] - - self.group = group - - self.name = name - self.label = label - self.enabled = enabled - self.use_python_2 = data.get("use_python_2", False) - - self.full_name = "/".join((group.name, name)) - self.full_label = full_label - self.arguments = arguments - self.executables = executables - self._environment = env - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - @property - def manager(self): - return self.group.manager - - @property - def host_name(self): - return self.group.host_name - - @property - def icon(self): - return self.group.icon - - @property - def is_host(self): - return self.group.is_host - - def find_executable(self): - """Try to find existing executable for application. - - Returns (str): Path to executable from `executables` or None if any - exists. - """ - for executable in self.executables: - if executable.exists(): - return executable - return None - - def launch(self, *args, **kwargs): - """Launch the application. - - For this purpose is used manager's launch method to keep logic at one - place. - - Arguments must match with manager's launch method. That's why *args - **kwargs are used. 
- - Returns: - subprocess.Popen: Return executed process as Popen object. - """ - return self.manager.launch(self.full_name, *args, **kwargs) - - -class ApplicationManager: - """Load applications and tools and store them by their full name. - - Args: - studio_settings (dict): Preloaded studio settings. When passed manager - will always use these values. Gives ability to create manager - using different settings. - """ - - def __init__(self, studio_settings=None): - self.log = Logger.get_logger(self.__class__.__name__) - - self.app_groups = {} - self.applications = {} - self.tool_groups = {} - self.tools = {} - - self._studio_settings = studio_settings - - self.refresh() - - def set_studio_settings(self, studio_settings): - """Ability to change init system settings. - - This will trigger refresh of manager. - """ - self._studio_settings = studio_settings - - self.refresh() - - def refresh(self): - """Refresh applications from settings.""" - self.app_groups.clear() - self.applications.clear() - self.tool_groups.clear() - self.tools.clear() - - if self._studio_settings is not None: - settings = copy.deepcopy(self._studio_settings) - else: - settings = get_studio_settings( - clear_metadata=False, exclude_locals=False - ) - - applications_addon_settings = settings["applications"] - - # Prepare known applications - app_defs = applications_addon_settings["applications"] - additional_apps = app_defs.pop("additional_apps") - for additional_app in additional_apps: - app_name = additional_app.pop("name") - if app_name in app_defs: - self.log.warning(( - "Additional application '{}' is already" - " in built-in applications." - ).format(app_name)) - app_defs[app_name] = additional_app - - for group_name, variant_defs in app_defs.items(): - group = ApplicationGroup(group_name, variant_defs, self) - self.app_groups[group_name] = group - for app in group: - self.applications[app.full_name] = app - - tools_definitions = applications_addon_settings["tool_groups"] - for tool_group_data in tools_definitions: - group = EnvironmentToolGroup(tool_group_data, self) - self.tool_groups[group.name] = group - for tool in group: - self.tools[tool.full_name] = tool - - def find_latest_available_variant_for_group(self, group_name): - group = self.app_groups.get(group_name) - if group is None or not group.enabled: - return None - - output = None - for _, variant in reversed(sorted(group.variants.items())): - executable = variant.find_executable() - if executable: - output = variant - break - return output - - def create_launch_context(self, app_name, **data): - """Prepare launch context for application. - - Args: - app_name (str): Name of application that should be launched. - **data (Any): Any additional data. Data may be used during - - Returns: - ApplicationLaunchContext: Launch context for application. - - Raises: - ApplicationNotFound: Application was not found by entered name. - """ - - app = self.applications.get(app_name) - if not app: - raise ApplicationNotFound(app_name) - - executable = app.find_executable() - - return ApplicationLaunchContext( - app, executable, **data - ) - - def launch_with_context(self, launch_context): - """Launch application using existing launch context. - - Args: - launch_context (ApplicationLaunchContext): Prepared launch - context. - """ - - if not launch_context.executable: - raise ApplictionExecutableNotFound(launch_context.application) - return launch_context.launch() - - def launch(self, app_name, **data): - """Launch procedure. 
- - For host application it's expected to contain "project_name", - "folder_path" and "task_name". - - Args: - app_name (str): Name of application that should be launched. - **data (dict): Any additional data. Data may be used during - preparation to store objects usable in multiple places. - - Raises: - ApplicationNotFound: Application was not found by entered - argument `app_name`. - ApplictionExecutableNotFound: Executables in application definition - were not found on this machine. - ApplicationLaunchFailed: Something important for application launch - failed. Exception should contain explanation message, - traceback should not be needed. - """ - - context = self.create_launch_context(app_name, **data) - return self.launch_with_context(context) - - - -class EnvironmentToolGroup: - """Hold information about environment tool group. - - Environment tool group may hold different variants of same tool and set - environments that are same for all of them. - - e.g. "mtoa" may have different versions but all environments except one - are same. - - Args: - data (dict): Group information with variants. - manager (ApplicationManager): Manager that creates the group. - """ - - def __init__(self, data, manager): - name = data["name"] - label = data["label"] - - self.name = name - self.label = label - self._data = data - self.manager = manager - - environment = {} - try: - environment = json.loads(data["environment"]) - except Exception: - pass - self._environment = environment - - variants = data.get("variants") or [] - variants_by_name = {} - for variant_data in variants: - tool = EnvironmentTool(variant_data, self) - variants_by_name[tool.name] = tool - self.variants = variants_by_name - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.name) - - def __iter__(self): - for variant in self.variants.values(): - yield variant - - @property - def environment(self): - return copy.deepcopy(self._environment) - - -class EnvironmentTool: - """Hold information about application tool. - - Structure of tool information. - - Args: - variant_data (dict): Variant data with environments and - host and app variant filters. - group (EnvironmentToolGroup): Name of group which wraps tool. - """ - - def __init__(self, variant_data, group): - # Backwards compatibility 3.9.1 - 3.9.2 - # - 'variant_data' contained only environments but contain also host - # and application variant filters - name = variant_data["name"] - label = variant_data["label"] - host_names = variant_data["host_names"] - app_variants = variant_data["app_variants"] - - environment = {} - try: - environment = json.loads(variant_data["environment"]) - except Exception: - pass - - self.host_names = host_names - self.app_variants = app_variants - self.name = name - self.variant_label = label - self.label = " ".join((group.label, label)) - self.group = group - - self._environment = environment - self.full_name = "/".join((group.name, name)) - - def __repr__(self): - return "<{}> - {}".format(self.__class__.__name__, self.full_name) - - @property - def environment(self): - return copy.deepcopy(self._environment) - - def is_valid_for_app(self, app): - """Is tool valid for application. - - Args: - app (Application): Application for which are prepared environments. 
- """ - if self.app_variants and app.full_name not in self.app_variants: - return False - - if self.host_names and app.host_name not in self.host_names: - return False - return True - - -class ApplicationExecutable: - """Representation of executable loaded from settings.""" - - def __init__(self, executable): - # Try to format executable with environments - try: - executable = executable.format(**os.environ) - except Exception: - pass - - # On MacOS check if exists path to executable when ends with `.app` - # - it is common that path will lead to "/Applications/Blender" but - # real path is "/Applications/Blender.app" - if platform.system().lower() == "darwin": - executable = self.macos_executable_prep(executable) - - self.executable_path = executable - - def __str__(self): - return self.executable_path - - def __repr__(self): - return "<{}> {}".format(self.__class__.__name__, self.executable_path) - - @staticmethod - def macos_executable_prep(executable): - """Try to find full path to executable file. - - Real executable is stored in '*.app/Contents/MacOS/'. - - Having path to '*.app' gives ability to read it's plist info and - use "CFBundleExecutable" key from plist to know what is "executable." - - Plist is stored in '*.app/Contents/Info.plist'. - - This is because some '*.app' directories don't have same permissions - as real executable. - """ - # Try to find if there is `.app` file - if not os.path.exists(executable): - _executable = executable + ".app" - if os.path.exists(_executable): - executable = _executable - - # Try to find real executable if executable has `Contents` subfolder - contents_dir = os.path.join(executable, "Contents") - if os.path.exists(contents_dir): - executable_filename = None - # Load plist file and check for bundle executable - plist_filepath = os.path.join(contents_dir, "Info.plist") - if os.path.exists(plist_filepath): - import plistlib - - if hasattr(plistlib, "load"): - with open(plist_filepath, "rb") as stream: - parsed_plist = plistlib.load(stream) - else: - parsed_plist = plistlib.readPlist(plist_filepath) - executable_filename = parsed_plist.get("CFBundleExecutable") - - if executable_filename: - executable = os.path.join( - contents_dir, "MacOS", executable_filename - ) - - return executable - - def as_args(self): - return [self.executable_path] - - def _realpath(self): - """Check if path is valid executable path.""" - # Check for executable in PATH - result = find_executable(self.executable_path) - if result is not None: - return result - - # This is not 100% validation but it is better than remove ability to - # launch .bat, .sh or extentionless files - if os.path.exists(self.executable_path): - return self.executable_path - return None - - def exists(self): - if not self.executable_path: - return False - return bool(self._realpath()) - - -class UndefinedApplicationExecutable(ApplicationExecutable): - """Some applications do not require executable path from settings. - - In that case this class is used to "fake" existing executable. - """ - def __init__(self): - pass - - def __str__(self): - return self.__class__.__name__ - - def __repr__(self): - return "<{}>".format(self.__class__.__name__) - - def as_args(self): - return [] - - def exists(self): - return True - - -@six.add_metaclass(ABCMeta) -class LaunchHook: - """Abstract base class of launch hook.""" - # Order of prelaunch hook, will be executed as last if set to None. - order = None - # List of host implementations, skipped if empty. 
- hosts = set()
- # Set of application groups
- app_groups = set()
- # Set of specific application names
- app_names = set()
- # Set of platform availability
- platforms = set()
- # Set of launch types for which the hook is available
- # - if empty then it is available for all launch types
- # - by default has 'local' which is the most common reason for launch hooks
- launch_types = {LaunchTypes.local}
-
- def __init__(self, launch_context):
- """Constructor of launch hook.
-
- Should always be called.
- """
- self.log = Logger.get_logger(self.__class__.__name__)
-
- self.launch_context = launch_context
-
- is_valid = self.class_validation(launch_context)
- if is_valid:
- is_valid = self.validate()
-
- self.is_valid = is_valid
-
- @classmethod
- def class_validation(cls, launch_context):
- """Validation of class attributes by launch context.
-
- Args:
- launch_context (ApplicationLaunchContext): Context of launching
- application.
-
- Returns:
- bool: Is launch hook valid for the context by class attributes.
- """
- if cls.platforms:
- low_platforms = tuple(
- _platform.lower()
- for _platform in cls.platforms
- )
- if platform.system().lower() not in low_platforms:
- return False
-
- if cls.hosts:
- if launch_context.host_name not in cls.hosts:
- return False
-
- if cls.app_groups:
- if launch_context.app_group.name not in cls.app_groups:
- return False
-
- if cls.app_names:
- if launch_context.app_name not in cls.app_names:
- return False
-
- if cls.launch_types:
- if launch_context.launch_type not in cls.launch_types:
- return False
-
- return True
-
- @property
- def data(self):
- return self.launch_context.data
-
- @property
- def application(self):
- return getattr(self.launch_context, "application", None)
-
- @property
- def manager(self):
- return getattr(self.application, "manager", None)
-
- @property
- def host_name(self):
- return getattr(self.application, "host_name", None)
-
- @property
- def app_group(self):
- return getattr(self.application, "group", None)
-
- @property
- def app_name(self):
- return getattr(self.application, "full_name", None)
-
- @property
- def addons_manager(self):
- return getattr(self.launch_context, "addons_manager", None)
-
- @property
- def modules_manager(self):
- """
- Deprecated:
- Use 'addons_manager' instead.
- """
- return self.addons_manager
-
- def validate(self):
- """Optional validation of launch hook on initialization.
-
- Returns:
- bool: Hook is valid (True) or invalid (False).
- """
- # QUESTION Not sure if this method has any usable potential.
- # - maybe result can be based on settings
- return True
-
- @abstractmethod
- def execute(self, *args, **kwargs):
- """Abstract execute method where logic of hook is."""
- pass
-
-
-class PreLaunchHook(LaunchHook):
- """Abstract class of prelaunch hook.
-
- This launch hook will be processed before application is launched.
-
- If any exception happens during processing, the application won't be
- launched.
- """
-
-
-class PostLaunchHook(LaunchHook):
- """Abstract class of postlaunch hook.
-
- This launch hook will be processed after application is launched.
-
- An exception during processing has no effect on the launched application,
- and processing of other postlaunch hooks won't stop either.
- """
-
-
-class ApplicationLaunchContext:
- """Context of launching application.
-
- Main purpose of context is to prepare launch arguments and keyword
- arguments for new process. Most important part of keyword argument
- preparation is environment variables.
-
- During the whole process it is possible to use the `data` attribute to
- store objects usable in multiple places.
-
- Launch arguments are strings in a list. It is possible to "chain"
- arguments when their order matters by adding an inner list whose
- order is fixed and should not change.
- NOTE: This is a recommendation, not a requirement.
- e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of process may
- insert argument between `nuke.exe` and `--NukeX`. To keep them together
- it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.
-
- Notes:
- It is possible to use launch context only to prepare environment
- variables. In that case `executable` may be None and the
- 'run_prelaunch_hooks' method can be used to run prelaunch hooks
- which prepare them.
-
- Args:
- application (Application): Application definition.
- executable (ApplicationExecutable): Object with path to executable.
- env_group (Optional[str]): Environment variable group. If not set
- 'DEFAULT_ENV_SUBGROUP' is used.
- launch_type (Optional[str]): Launch type. If not set 'local' is used.
- **data (dict): Any additional data. Data may be used during
- preparation to store objects usable in multiple places.
- """
-
- def __init__(
- self,
- application,
- executable,
- env_group=None,
- launch_type=None,
- **data
- ):
- from ayon_core.addon import AddonsManager
-
- # Application object
- self.application = application
-
- self.addons_manager = AddonsManager()
-
- # Logger
- logger_name = "{}-{}".format(self.__class__.__name__,
- self.application.full_name)
- self.log = Logger.get_logger(logger_name)
-
- self.executable = executable
-
- if launch_type is None:
- launch_type = LaunchTypes.local
- self.launch_type = launch_type
-
- if env_group is None:
- env_group = DEFAULT_ENV_SUBGROUP
-
- self.env_group = env_group
-
- self.data = dict(data)
-
- launch_args = []
- if executable is not None:
- launch_args = executable.as_args()
- # subprocess.Popen launch arguments (first argument in constructor)
- self.launch_args = launch_args
- self.launch_args.extend(application.arguments)
- if self.data.get("app_args"):
- self.launch_args.extend(self.data.pop("app_args"))
-
- # Handle launch environments
- src_env = self.data.pop("env", None)
- if src_env is not None and not isinstance(src_env, dict):
- self.log.warning((
- "Passed `env` kwarg has invalid type: {}. Expected: `dict`."
- " Using `os.environ` instead."
- ).format(str(type(src_env))))
- src_env = None
-
- if src_env is None:
- src_env = os.environ
-
- ignored_env = {"QT_API", }
- env = {
- key: str(value)
- for key, value in src_env.items()
- if key not in ignored_env
- }
- # subprocess.Popen keyword arguments
- self.kwargs = {"env": env}
-
- if platform.system().lower() == "windows":
- # Detach new process from currently running process on Windows
- flags = (
- subprocess.CREATE_NEW_PROCESS_GROUP
- | subprocess.DETACHED_PROCESS
- )
- self.kwargs["creationflags"] = flags
-
- if not sys.stdout:
- self.kwargs["stdout"] = subprocess.DEVNULL
- self.kwargs["stderr"] = subprocess.DEVNULL
-
- self.prelaunch_hooks = None
- self.postlaunch_hooks = None
-
- self.process = None
- self._prelaunch_hooks_executed = False
-
- @property
- def env(self):
- if (
- "env" not in self.kwargs
- or self.kwargs["env"] is None
- ):
- self.kwargs["env"] = {}
- return self.kwargs["env"]
-
- @env.setter
- def env(self, value):
- if not isinstance(value, dict):
- raise ValueError(
- "'env' attribute expects 'dict' object. 
Got: {}".format( - str(type(value)) - ) - ) - self.kwargs["env"] = value - - @property - def modules_manager(self): - """ - Deprecated: - Use 'addons_manager' instead. - - """ - return self.addons_manager - - def _collect_addons_launch_hook_paths(self): - """Helper to collect application launch hooks from addons. - - Module have to have implemented 'get_launch_hook_paths' method which - can expect application as argument or nothing. - - Returns: - List[str]: Paths to launch hook directories. - """ - - expected_types = (list, tuple, set) - - output = [] - for module in self.addons_manager.get_enabled_addons(): - # Skip module if does not have implemented 'get_launch_hook_paths' - func = getattr(module, "get_launch_hook_paths", None) - if func is None: - continue - - func = module.get_launch_hook_paths - if hasattr(inspect, "signature"): - sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 - else: - expect_args = len(inspect.getargspec(func)[0]) > 0 - - # Pass application argument if method expect it. - try: - if expect_args: - hook_paths = func(self.application) - else: - hook_paths = func() - except Exception: - self.log.warning( - "Failed to call 'get_launch_hook_paths'", - exc_info=True - ) - continue - - if not hook_paths: - continue - - # Convert string to list - if isinstance(hook_paths, six.string_types): - hook_paths = [hook_paths] - - # Skip invalid types - if not isinstance(hook_paths, expected_types): - self.log.warning(( - "Result of `get_launch_hook_paths`" - " has invalid type {}. Expected {}" - ).format(type(hook_paths), expected_types)) - continue - - output.extend(hook_paths) - return output - - def paths_to_launch_hooks(self): - """Directory paths where to look for launch hooks.""" - # This method has potential to be part of application manager (maybe). 
- paths = [] - - # TODO load additional studio paths from settings - global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks") - - hooks_dirs = [ - global_hooks_dir - ] - if self.host_name: - # If host requires launch hooks and is module then launch hooks - # should be collected using 'collect_launch_hook_paths' - # - module have to implement 'get_launch_hook_paths' - host_module = self.addons_manager.get_host_addon(self.host_name) - if not host_module: - hooks_dirs.append(os.path.join( - AYON_CORE_ROOT, "hosts", self.host_name, "hooks" - )) - - for path in hooks_dirs: - if ( - os.path.exists(path) - and os.path.isdir(path) - and path not in paths - ): - paths.append(path) - - # Load modules paths - paths.extend(self._collect_addons_launch_hook_paths()) - - return paths - - def discover_launch_hooks(self, force=False): - """Load and prepare launch hooks.""" - if ( - self.prelaunch_hooks is not None - or self.postlaunch_hooks is not None - ): - if not force: - self.log.info("Launch hooks were already discovered.") - return - - self.prelaunch_hooks.clear() - self.postlaunch_hooks.clear() - - self.log.debug("Discovery of launch hooks started.") - - paths = self.paths_to_launch_hooks() - self.log.debug("Paths searched for launch hooks:\n{}".format( - "\n".join("- {}".format(path) for path in paths) - )) - - all_classes = { - "pre": [], - "post": [] - } - for path in paths: - if not os.path.exists(path): - self.log.info( - "Path to launch hooks does not exist: \"{}\"".format(path) - ) - continue - - modules, _crashed = modules_from_path(path) - for _filepath, module in modules: - all_classes["pre"].extend( - classes_from_module(PreLaunchHook, module) - ) - all_classes["post"].extend( - classes_from_module(PostLaunchHook, module) - ) - - for launch_type, classes in all_classes.items(): - hooks_with_order = [] - hooks_without_order = [] - for klass in classes: - try: - hook = klass(self) - if not hook.is_valid: - self.log.debug( - "Skipped hook invalid for current launch context: " - "{}".format(klass.__name__) - ) - continue - - if inspect.isabstract(hook): - self.log.debug("Skipped abstract hook: {}".format( - klass.__name__ - )) - continue - - # Separate hooks by pre/post class - if hook.order is None: - hooks_without_order.append(hook) - else: - hooks_with_order.append(hook) - - except Exception: - self.log.warning( - "Initialization of hook failed: " - "{}".format(klass.__name__), - exc_info=True - ) - - # Sort hooks with order by order - ordered_hooks = list(sorted( - hooks_with_order, key=lambda obj: obj.order - )) - # Extend ordered hooks with hooks without defined order - ordered_hooks.extend(hooks_without_order) - - if launch_type == "pre": - self.prelaunch_hooks = ordered_hooks - else: - self.postlaunch_hooks = ordered_hooks - - self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format( - len(self.prelaunch_hooks), len(self.postlaunch_hooks) - )) - - @property - def app_name(self): - return self.application.name - - @property - def host_name(self): - return self.application.host_name - - @property - def app_group(self): - return self.application.group - - @property - def manager(self): - return self.application.manager - - def _run_process(self): - # Windows and MacOS have easier process start - low_platform = platform.system().lower() - if low_platform in ("windows", "darwin"): - return subprocess.Popen(self.launch_args, **self.kwargs) - - # Linux uses mid process - # - it is possible that the mid process executable is not - # available for this version of AYON in that case 
use standard
- launch
- launch_args = get_linux_launcher_args()
- if launch_args is None:
- return subprocess.Popen(self.launch_args, **self.kwargs)
-
- # Prepare data that will be passed to midprocess
- # - store arguments to a json and pass path to json as last argument
- # - pass environments to set
- app_env = self.kwargs.pop("env", {})
- json_data = {
- "args": self.launch_args,
- "env": app_env
- }
- if app_env:
- # Filter environments of subprocess
- self.kwargs["env"] = {
- key: value
- for key, value in os.environ.items()
- if key in app_env
- }
-
- # Create temp file
- json_temp = tempfile.NamedTemporaryFile(
- mode="w", prefix="op_app_args", suffix=".json", delete=False
- )
- json_temp.close()
- json_temp_filepath = json_temp.name
- with open(json_temp_filepath, "w") as stream:
- json.dump(json_data, stream)
-
- launch_args.append(json_temp_filepath)
-
- # Create mid-process which will launch application
- process = subprocess.Popen(launch_args, **self.kwargs)
- # Wait until the process finishes
- # - This is important! The process would stay in "open" state.
- process.wait()
- # Remove the temp file
- os.remove(json_temp_filepath)
- # Return process which is already terminated
- return process
-
- def run_prelaunch_hooks(self):
- """Run prelaunch hooks.
-
- This method will be executed only once; any future calls will skip
- the processing.
- """
-
- if self._prelaunch_hooks_executed:
- self.log.warning("Prelaunch hooks were already executed.")
- return
- # Discover launch hooks
- self.discover_launch_hooks()
-
- # Execute prelaunch hooks
- for prelaunch_hook in self.prelaunch_hooks:
- self.log.debug("Executing prelaunch hook: {}".format(
- str(prelaunch_hook.__class__.__name__)
- ))
- prelaunch_hook.execute()
- self._prelaunch_hooks_executed = True
-
- def launch(self):
- """Collect data for new process and then create it.
-
- This method must not be executed more than once.
-
- Returns:
- subprocess.Popen: Created process as Popen object.
- """
- if self.process is not None:
- self.log.warning("Application was already launched.")
- return
-
- if not self._prelaunch_hooks_executed:
- self.run_prelaunch_hooks()
-
- self.log.debug("All prelaunch hooks executed. Starting new process.")
-
- # Prepare subprocess args
- args_len_str = ""
- if isinstance(self.launch_args, str):
- args = self.launch_args
- else:
- args = self.clear_launch_args(self.launch_args)
- args_len_str = " ({})".format(len(args))
- self.log.info(
- "Launching \"{}\" with args{}: {}".format(
- self.application.full_name, args_len_str, args
- )
- )
- self.launch_args = args
-
- # Run process
- self.process = self._run_process()
-
- # Process post launch hooks
- for postlaunch_hook in self.postlaunch_hooks:
- self.log.debug("Executing postlaunch hook: {}".format(
- str(postlaunch_hook.__class__.__name__)
- ))
-
- # TODO how to handle errors?
- # - store to variable to make them accessible?
- try:
- postlaunch_hook.execute()
-
- except Exception:
- self.log.warning(
- "After launch procedures were not successful.",
- exc_info=True
- )
-
- self.log.debug("Launch of {} finished.".format(
- self.application.full_name
- ))
-
- return self.process
-
- @staticmethod
- def clear_launch_args(args):
- """Collect launch arguments to final order.
-
- Launch arguments should be a list that may contain other lists; this
- function will unpack inner lists and keep ordering.
-
- ```
- # source
- [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]]
- # result
- [ arg1, arg2, arg3, arg4, arg5, arg6]
- ```
-
- Args:
- args (list): Source arguments in a list that may contain inner lists.
-
- Returns:
- list: Unpacked arguments.
- """
- if isinstance(args, str):
- return args
- all_cleared = False
- while not all_cleared:
- all_cleared = True
- new_args = []
- for arg in args:
- if isinstance(arg, (list, tuple, set)):
- all_cleared = False
- for _arg in arg:
- new_args.append(_arg)
- else:
- new_args.append(arg)
- args = new_args
-
- return args
-
-
-class MissingRequiredKey(KeyError):
- pass
-
-
-class EnvironmentPrepData(dict):
- """Helper dictionary for storing temp data during environment prep.
-
- Args:
- data (dict): Data must contain required keys.
- """
- required_keys = (
- "project_doc", "asset_doc", "task_name", "app", "anatomy"
- )
-
- def __init__(self, data):
- for key in self.required_keys:
- if key not in data:
- raise MissingRequiredKey(key)
-
- if not data.get("log"):
- data["log"] = get_logger()
-
- if data.get("env") is None:
- data["env"] = os.environ.copy()
-
- project_name = data["project_doc"]["name"]
- if "project_settings" not in data:
- data["project_settings"] = get_project_settings(project_name)
-
- super(EnvironmentPrepData, self).__init__(data)
-
-
-def get_app_environments_for_context(
- project_name,
- folder_path,
- task_name,
- app_name,
- env_group=None,
- launch_type=None,
- env=None,
- addons_manager=None
-):
- """Prepare environment variables by context.
-
- Args:
- project_name (str): Name of project.
- folder_path (str): Folder path.
- task_name (str): Name of task.
- app_name (str): Name of application that is launched and can be found
- by ApplicationManager.
- env_group (Optional[str]): Name of environment group. If not passed
- default group is used.
- launch_type (Optional[str]): Type for which prelaunch hooks are
- executed.
- env (Optional[dict[str, str]]): Initial environment variables.
- `os.environ` is used when not passed.
- addons_manager (Optional[AddonsManager]): Initialized modules
- manager.
-
- Returns:
- dict: Environments for passed context and application.
- """ - - # Prepare app object which can be obtained only from ApplicationManager - app_manager = ApplicationManager() - context = app_manager.create_launch_context( - app_name, - project_name=project_name, - folder_path=folder_path, - task_name=task_name, - env_group=env_group, - launch_type=launch_type, - env=env, - addons_manager=addons_manager, - modules_manager=addons_manager, - ) - context.run_prelaunch_hooks() - return context.env - - -def _merge_env(env, current_env): - """Modified function(merge) from acre module.""" - import acre - - result = current_env.copy() - for key, value in env.items(): - # Keep missing keys by not filling `missing` kwarg - value = acre.lib.partial_format(value, data=current_env) - result[key] = value - return result - - -def _add_python_version_paths(app, env, logger, addons_manager): - """Add vendor packages specific for a Python version.""" - - for addon in addons_manager.get_enabled_addons(): - addon.modify_application_launch_arguments(app, env) - - # Skip adding if host name is not set - if not app.host_name: - return - - # Add Python 2/3 modules - python_vendor_dir = os.path.join( - AYON_CORE_ROOT, - "vendor", - "python" - ) - if app.use_python_2: - pythonpath = os.path.join(python_vendor_dir, "python_2") - else: - pythonpath = os.path.join(python_vendor_dir, "python_3") - - if not os.path.exists(pythonpath): - return - - logger.debug("Adding Python version specific paths to PYTHONPATH") - python_paths = [pythonpath] - - # Load PYTHONPATH from current launch context - python_path = env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - env["PYTHONPATH"] = os.pathsep.join(python_paths) - - -def prepare_app_environments( - data, env_group=None, implementation_envs=True, addons_manager=None -): - """Modify launch environments based on launched app and context. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - import acre - - app = data["app"] - log = data["log"] - source_env = data["env"].copy() - - if addons_manager is None: - from ayon_core.addon import AddonsManager - - addons_manager = AddonsManager() - - _add_python_version_paths(app, source_env, log, addons_manager) - - # Use environments from local settings - filtered_local_envs = {} - # NOTE Overrides for environment variables are not implemented in AYON. 
- # project_settings = data["project_settings"] - # whitelist_envs = project_settings["general"].get("local_env_white_list") - # if whitelist_envs: - # local_settings = get_local_settings() - # local_envs = local_settings.get("environments") or {} - # filtered_local_envs = { - # key: value - # for key, value in local_envs.items() - # if key in whitelist_envs - # } - - # Apply local environment variables for already existing values - for key, value in filtered_local_envs.items(): - if key in source_env: - source_env[key] = value - - # `app_and_tool_labels` has debug purpose - app_and_tool_labels = [app.full_name] - # Environments for application - environments = [ - app.group.environment, - app.environment - ] - - asset_doc = data.get("asset_doc") - # Add tools environments - groups_by_name = {} - tool_by_group_name = collections.defaultdict(dict) - if asset_doc: - # Make sure each tool group can be added only once - for key in asset_doc["data"].get("tools_env") or []: - tool = app.manager.tools.get(key) - if not tool or not tool.is_valid_for_app(app): - continue - groups_by_name[tool.group.name] = tool.group - tool_by_group_name[tool.group.name][tool.name] = tool - - for group_name in sorted(groups_by_name.keys()): - group = groups_by_name[group_name] - environments.append(group.environment) - for tool_name in sorted(tool_by_group_name[group_name].keys()): - tool = tool_by_group_name[group_name][tool_name] - environments.append(tool.environment) - app_and_tool_labels.append(tool.full_name) - - log.debug( - "Will add environments for apps and tools: {}".format( - ", ".join(app_and_tool_labels) - ) - ) - - env_values = {} - for _env_values in environments: - if not _env_values: - continue - - # Choose right platform - tool_env = parse_environments(_env_values, env_group) - - # Apply local environment variables - # - must happen between all values because they may be used during - # merge - for key, value in filtered_local_envs.items(): - if key in tool_env: - tool_env[key] = value - - # Merge dictionaries - env_values = _merge_env(tool_env, env_values) - - merged_env = _merge_env(env_values, source_env) - - loaded_env = acre.compute(merged_env, cleanup=False) - - final_env = None - # Add host specific environments - if app.host_name and implementation_envs: - host_addon = addons_manager.get_host_addon(app.host_name) - if not host_addon: - module = __import__("ayon_core.hosts", fromlist=[app.host_name]) - host_module = getattr(module, app.host_name, None) - add_implementation_envs = None - if host_addon: - add_implementation_envs = getattr( - host_addon, "add_implementation_envs", None - ) - if add_implementation_envs: - # Function may only modify passed dict without returning value - final_env = add_implementation_envs(loaded_env, app) - - if final_env is None: - final_env = loaded_env - - keys_to_remove = set(source_env.keys()) - set(final_env.keys()) - - # Update env - data["env"].update(final_env) - for key in keys_to_remove: - data["env"].pop(key, None) - - -def apply_project_environments_value( - project_name, env, project_settings=None, env_group=None -): - """Apply project specific environments on passed environments. - - The environments are applied on passed `env` argument value so it is not - required to apply changes back. - - Args: - project_name (str): Name of project for which environments should be - received. - env (dict): Environment values on which project specific environments - will be applied. - project_settings (dict): Project settings for passed project name. 
- Optional if project settings are already prepared. - - Returns: - dict: Passed env values with applied project environments. - - Raises: - KeyError: If project settings do not contain keys for project specific - environments. - """ - import acre - - if project_settings is None: - project_settings = get_project_settings(project_name) - - env_value = project_settings["core"]["project_environments"] - if env_value: - env_value = json.loads(env_value) - parsed_value = parse_environments(env_value, env_group) - env.update(acre.compute( - _merge_env(parsed_value, env), - cleanup=False - )) - return env - - -def prepare_context_environments(data, env_group=None, addons_manager=None): - """Modify launch environments with context data for launched host. - - Args: - data (EnvironmentPrepData): Dictionary where result and intermediate - result will be stored. - """ - - from ayon_core.pipeline.template_data import get_template_data - - # Context environments - log = data["log"] - - project_doc = data["project_doc"] - asset_doc = data["asset_doc"] - task_name = data["task_name"] - if not project_doc: - log.info( - "Skipping context environments preparation." - " Launch context does not contain required data." - ) - return - - # Load project specific environments - project_name = project_doc["name"] - project_settings = get_project_settings(project_name) - data["project_settings"] = project_settings - - app = data["app"] - context_env = { - "AYON_PROJECT_NAME": project_doc["name"], - "AYON_APP_NAME": app.full_name - } - if asset_doc: - asset_name = get_asset_name_identifier(asset_doc) - context_env["AYON_FOLDER_PATH"] = asset_name - - if task_name: - context_env["AYON_TASK_NAME"] = task_name - - log.debug( - "Context environments set:\n{}".format( - json.dumps(context_env, indent=4) - ) - ) - data["env"].update(context_env) - - # Apply project specific environments on current env value - # - apply them once the context environments are set - apply_project_environments_value( - project_name, data["env"], project_settings, env_group - ) - - if not app.is_host: - return - - data["env"]["AYON_HOST_NAME"] = app.host_name - - if not asset_doc or not task_name: - # QUESTION replace with log.info and skip workfile discovery? - # - technically it should be possible to launch host without context - raise ApplicationLaunchFailed( - "Host launch require asset and task context." - ) - - workdir_data = get_template_data( - project_doc, asset_doc, task_name, app.host_name, project_settings - ) - data["workdir_data"] = workdir_data - - anatomy = data["anatomy"] - - task_type = workdir_data["task"]["type"] - # Temp solution how to pass task type to `_prepare_last_workfile` - data["task_type"] = task_type - - try: - from ayon_core.pipeline.workfile import get_workdir_with_workdir_data - - workdir = get_workdir_with_workdir_data( - workdir_data, - anatomy.project_name, - anatomy, - project_settings=project_settings - ) - - except Exception as exc: - raise ApplicationLaunchFailed( - "Error in anatomy.format: {}".format(str(exc)) - ) - - if not os.path.exists(workdir): - log.debug( - "Creating workdir folder: \"{}\"".format(workdir) - ) - try: - os.makedirs(workdir) - except Exception as exc: - raise ApplicationLaunchFailed( - "Couldn't create workdir because: {}".format(str(exc)) - ) - - data["env"]["AYON_WORKDIR"] = workdir - - _prepare_last_workfile(data, workdir, addons_manager) - - -def _prepare_last_workfile(data, workdir, addons_manager): - """last workfile workflow preparation. 
-
- Checks if the last workfile workflow should be used and tries to find
- the last workfile. Both pieces of information are stored to `data` and
- environments.
-
- Last workfile is always filled (with version 1) even if no workfile
- exists yet.
-
- Args:
- data (EnvironmentPrepData): Dictionary where result and intermediate
- result will be stored.
- workdir (str): Path to folder where workfiles should be stored.
- """
-
- from ayon_core.addon import AddonsManager
- from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS
-
- if not addons_manager:
- addons_manager = AddonsManager()
-
- log = data["log"]
-
- _workdir_data = data.get("workdir_data")
- if not _workdir_data:
- log.info(
- "Skipping last workfile preparation."
- " Key `workdir_data` not filled."
- )
- return
-
- app = data["app"]
- workdir_data = copy.deepcopy(_workdir_data)
- project_name = data["project_name"]
- task_name = data["task_name"]
- task_type = data["task_type"]
-
- start_last_workfile = data.get("start_last_workfile")
- if start_last_workfile is None:
- start_last_workfile = should_start_last_workfile(
- project_name, app.host_name, task_name, task_type
- )
- else:
- log.info("Opening of last workfile was disabled by user")
-
- data["start_last_workfile"] = start_last_workfile
-
- workfile_startup = should_workfile_tool_start(
- project_name, app.host_name, task_name, task_type
- )
- data["workfile_startup"] = workfile_startup
-
- # Store boolean as "0"(False) or "1"(True)
- data["env"]["AVALON_OPEN_LAST_WORKFILE"] = (
- str(int(bool(start_last_workfile)))
- )
- data["env"]["AYON_WORKFILE_TOOL_ON_START"] = (
- str(int(bool(workfile_startup)))
- )
-
- _sub_msg = "" if start_last_workfile else " not"
- log.debug(
- "Last workfile should{} be opened on start.".format(_sub_msg)
- )
-
- # Last workfile path
- last_workfile_path = data.get("last_workfile_path") or ""
- if not last_workfile_path:
- host_addon = addons_manager.get_host_addon(app.host_name)
- if host_addon:
- extensions = host_addon.get_workfile_extensions()
- else:
- extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
-
- if extensions:
- from ayon_core.pipeline.workfile import (
- get_workfile_template_key,
- get_last_workfile
- )
-
- anatomy = data["anatomy"]
- project_settings = data["project_settings"]
- task_type = workdir_data["task"]["type"]
- template_key = get_workfile_template_key(
- task_type,
- app.host_name,
- project_name,
- project_settings=project_settings
- )
- # Find last workfile
- file_template = str(anatomy.templates[template_key]["file"])
-
- workdir_data.update({
- "version": 1,
- "user": get_ayon_username(),
- "ext": extensions[0]
- })
-
- last_workfile_path = get_last_workfile(
- workdir, file_template, workdir_data, extensions, True
- )
-
- if not os.path.exists(last_workfile_path):
- log.debug((
- "Workfile for launch context does not exist"
- " yet but path will be set."
- ))
- log.debug(
- "Setting last workfile path: {}".format(last_workfile_path)
- )
-
- data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
- data["last_workfile_path"] = last_workfile_path
-
-
-def should_start_last_workfile(
- project_name, host_name, task_name, task_type, default_output=False
-):
- """Define if host should start last version workfile if possible.
-
- Default output is `False`. Can be overridden with environment variable
- `AYON_OPEN_LAST_WORKFILE`, valid values without case sensitivity are
- `"0", "1", "true", "false", "yes", "no"`.
-
- Args:
- project_name (str): Name of project.
- host_name (str): Name of host which is launched.
In avalon's - application context it's value stored in app definition under - key `"application_dir"`. Is not case sensitive. - task_name (str): Name of task which is used for launching the host. - Task name is not case sensitive. - - Returns: - bool: True if host should start workfile. - - """ - - project_settings = get_project_settings(project_name) - profiles = ( - project_settings - ["core"] - ["tools"] - ["Workfiles"] - ["last_workfile_on_startup"] - ) - - if not profiles: - return default_output - - filter_data = { - "tasks": task_name, - "task_types": task_type, - "hosts": host_name - } - matching_item = filter_profiles(profiles, filter_data) - - output = None - if matching_item: - output = matching_item.get("enabled") - - if output is None: - return default_output - return output - - -def should_workfile_tool_start( - project_name, host_name, task_name, task_type, default_output=False -): - """Define if host should start workfile tool at host launch. - - Default output is `False`. Can be overridden with environment variable - `AYON_WORKFILE_TOOL_ON_START`, valid values without case sensitivity are - `"0", "1", "true", "false", "yes", "no"`. - - Args: - project_name (str): Name of project. - host_name (str): Name of host which is launched. In avalon's - application context it's value stored in app definition under - key `"application_dir"`. Is not case sensitive. - task_name (str): Name of task which is used for launching the host. - Task name is not case sensitive. - - Returns: - bool: True if host should start workfile. - - """ - - project_settings = get_project_settings(project_name) - profiles = ( - project_settings - ["core"] - ["tools"] - ["Workfiles"] - ["open_workfile_tool_on_startup"] - ) - - if not profiles: - return default_output - - filter_data = { - "tasks": task_name, - "task_types": task_type, - "hosts": host_name - } - matching_item = filter_profiles(profiles, filter_data) - - output = None - if matching_item: - output = matching_item.get("enabled") - - if output is None: - return default_output - return output - - -def get_non_python_host_kwargs(kwargs, allow_console=True): - """Explicit setting of kwargs for Popen for AE/PS/Harmony. 
- - Expected behavior - - ayon_console opens window with logs - - ayon has stdout/stderr available for capturing - - Args: - kwargs (dict) or None - allow_console (bool): use False for inner Popen opening app itself or - it will open additional console (at least for Harmony) - """ - - if kwargs is None: - kwargs = {} - - if platform.system().lower() != "windows": - return kwargs - - executable_path = os.environ.get("AYON_EXECUTABLE") - - executable_filename = "" - if executable_path: - executable_filename = os.path.basename(executable_path) - - is_gui_executable = "ayon_console" not in executable_filename - if is_gui_executable: - kwargs.update({ - "creationflags": subprocess.CREATE_NO_WINDOW, - "stdout": subprocess.DEVNULL, - "stderr": subprocess.DEVNULL - }) - elif allow_console: - kwargs.update({ - "creationflags": subprocess.CREATE_NEW_CONSOLE - }) - return kwargs diff --git a/client/ayon_core/lib/ayon_connection.py b/client/ayon_core/lib/ayon_connection.py new file mode 100644 index 0000000000..1132d77aaa --- /dev/null +++ b/client/ayon_core/lib/ayon_connection.py @@ -0,0 +1,194 @@ +import os + +import semver +import ayon_api + +from .local_settings import get_local_site_id + + +class _Cache: + initialized = False + + +def _new_get_last_versions( + self, + project_name, + product_ids, + active=True, + fields=None, + own_attributes=False +): + """Query last version entities by product ids. + + Args: + project_name (str): Project where to look for representation. + product_ids (Iterable[str]): Product ids. + active (Optional[bool]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Optional[Iterable[str]]): fields to be queried + for representations. + own_attributes (Optional[bool]): Attribute values that are + not explicitly set on entity will have 'None' value. + + Returns: + dict[str, dict[str, Any]]: Last versions by product id. + + """ + if fields: + fields = set(fields) + fields.add("productId") + + versions = self.get_versions( + project_name, + product_ids=product_ids, + latest=True, + hero=False, + active=active, + fields=fields, + own_attributes=own_attributes + ) + return { + version["productId"]: version + for version in versions + } + + +def _new_get_last_version_by_product_id( + self, + project_name, + product_id, + active=True, + fields=None, + own_attributes=False +): + """Query last version entity by product id. + + Args: + project_name (str): Project where to look for representation. + product_id (str): Product id. + active (Optional[bool]): Receive active/inactive entities. + Both are returned when 'None' is passed. + fields (Optional[Iterable[str]]): fields to be queried + for representations. + own_attributes (Optional[bool]): Attribute values that are + not explicitly set on entity will have 'None' value. + + Returns: + Union[dict[str, Any], None]: Queried version entity or None. + + """ + versions = self.get_versions( + project_name, + product_ids=[product_id], + latest=True, + hero=False, + active=active, + fields=fields, + own_attributes=own_attributes + ) + for version in versions: + return version + return None + + +def _new_get_last_version_by_product_name( + self, + project_name, + product_name, + folder_id, + active=True, + fields=None, + own_attributes=False +): + """Query last version entity by product name and folder id. + + Args: + project_name (str): Project where to look for representation. + product_name (str): Product name. + folder_id (str): Folder id. 
+ active (Optional[bool]): Receive active/inactive entities.
+ Both are returned when 'None' is passed.
+ fields (Optional[Iterable[str]]): fields to be queried
+ for representations.
+ own_attributes (Optional[bool]): Attribute values that are
+ not explicitly set on entity will have 'None' value.
+
+ Returns:
+ Union[dict[str, Any], None]: Queried version entity or None.
+
+ """
+ if not folder_id:
+ return None
+
+ product = self.get_product_by_name(
+ project_name, product_name, folder_id, fields={"id"}
+ )
+ if not product:
+ return None
+ return self.get_last_version_by_product_id(
+ project_name,
+ product["id"],
+ active=active,
+ fields=fields,
+ own_attributes=own_attributes
+ )
+
+
+def initialize_ayon_connection(force=False):
+ """Initialize global AYON api connection.
+
+ Create global connection in ayon_api module and set site id
+ and client version.
+ It is silently skipped if initialization already happened.
+
+ Args:
+ force (Optional[bool]): Force reinitialize connection.
+ Defaults to False.
+
+ """
+ if not force and _Cache.initialized:
+ return
+
+ _Cache.initialized = True
+ ayon_api_version = (
+ semver.VersionInfo.parse(ayon_api.__version__).to_tuple()
+ )
+ # TODO remove monkey patching once AYON api is safely updated
+ fix_before_1_0_2 = ayon_api_version < (1, 0, 2)
+ # Monkey patching to fix 'get_last_version_by_product_name'
+ if fix_before_1_0_2:
+ ayon_api.ServerAPI.get_last_versions = (
+ _new_get_last_versions
+ )
+ ayon_api.ServerAPI.get_last_version_by_product_id = (
+ _new_get_last_version_by_product_id
+ )
+ ayon_api.ServerAPI.get_last_version_by_product_name = (
+ _new_get_last_version_by_product_name
+ )
+
+ site_id = get_local_site_id()
+ version = os.getenv("AYON_VERSION")
+ if ayon_api.is_connection_created():
+ con = ayon_api.get_server_api_connection()
+ # Monkey patching to fix 'get_last_version_by_product_name'
+ if fix_before_1_0_2:
+ def _lvs_wrapper(*args, **kwargs):
+ return _new_get_last_versions(
+ con, *args, **kwargs
+ )
+ def _lv_by_pi_wrapper(*args, **kwargs):
+ return _new_get_last_version_by_product_id(
+ con, *args, **kwargs
+ )
+ def _lv_by_pn_wrapper(*args, **kwargs):
+ return _new_get_last_version_by_product_name(
+ con, *args, **kwargs
+ )
+ con.get_last_versions = _lvs_wrapper
+ con.get_last_version_by_product_id = _lv_by_pi_wrapper
+ con.get_last_version_by_product_name = _lv_by_pn_wrapper
+ con.set_site_id(site_id)
+ con.set_client_version(version)
+ else:
+ ayon_api.create_connection(site_id, version)
diff --git a/client/ayon_core/lib/ayon_info.py b/client/ayon_core/lib/ayon_info.py
index ec37d735d8..fc09a7c90c 100644
--- a/client/ayon_core/lib/ayon_info.py
+++ b/client/ayon_core/lib/ayon_info.py
@@ -10,6 +10,12 @@ from .local_settings import get_local_site_id
 def get_ayon_launcher_version():
+ """Get AYON launcher version.
+
+ Returns:
+ str: Version string.
+
+ """
 version_filepath = os.path.join(os.environ["AYON_ROOT"], "version.py")
 if not os.path.exists(version_filepath):
 return None
@@ -24,8 +30,8 @@ def is_running_from_build():
 Returns:
 bool: True if running from build.
- """
+
+ """
 executable_path = os.environ["AYON_EXECUTABLE"]
 executable_filename = os.path.basename(executable_path)
 if "python" in executable_filename.lower():
@@ -33,6 +39,32 @@
 return True
+def is_using_ayon_console():
+ """Check if AYON launcher console executable is used.
+
+ This function makes sense only on Windows platform. For other platforms
+ it always returns True. True is also returned if process is running from
+ code.
+
+ AYON launcher on Windows has 2 executable files. First 'ayon_console.exe'
+ works as 'python.exe' executable, the second 'ayon.exe' works as
+ 'pythonw.exe' executable. The difference is in how stdout/stderr are
+ handled (especially when calling subprocess).
+
+ Returns:
+ bool: True if console executable is used.
+
+ """
+ if (
+ platform.system().lower() != "windows"
+ or is_running_from_build()
+ ):
+ return True
+ executable_path = os.environ["AYON_EXECUTABLE"]
+ executable_filename = os.path.basename(executable_path)
+ return "ayon_console" in executable_filename
+
+
 def is_staging_enabled():
 return os.getenv("AYON_USE_STAGING") == "1"
@@ -102,8 +134,8 @@ def get_all_current_info():
 def extract_ayon_info_to_file(dirpath, filename=None):
 """Extract all current info to a file.
- It is possible to define only directory path. Filename is concatenated with
- pype version, workstation site id and timestamp.
+ It is possible to define only directory path. Filename is concatenated
+ with AYON version, workstation site id and timestamp.
 Args:
 dirpath (str): Path to directory where file will be stored.
diff --git a/client/ayon_core/lib/connections.py b/client/ayon_core/lib/connections.py
deleted file mode 100644
index 6a0cf4ae1c..0000000000
--- a/client/ayon_core/lib/connections.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import requests
-import os
-
-
-def requests_post(*args, **kwargs):
- """Wrap request post method.
-
- Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
- variable is found. This is useful when Deadline server is
- running with self-signed certificates and its certificate is not
- added to trusted certificates on client machines.
-
- Warning:
- Disabling SSL certificate validation is defeating one line
- of defense SSL is providing, and it is not recommended.
-
- """
- if "verify" not in kwargs:
- kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True)
- return requests.post(*args, **kwargs)
-
-
-def requests_get(*args, **kwargs):
- """Wrap request get method.
-
- Disabling SSL certificate validation if ``DONT_VERIFY_SSL`` environment
- variable is found. This is useful when Deadline server is
- running with self-signed certificates and its certificate is not
- added to trusted certificates on client machines.
-
- Warning:
- Disabling SSL certificate validation is defeating one line
- of defense SSL is providing, and it is not recommended.
-
- """
- if "verify" not in kwargs:
- kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL", True)
- return requests.get(*args, **kwargs)
diff --git a/client/ayon_core/lib/local_settings.py b/client/ayon_core/lib/local_settings.py
index 022f63a618..fd255c997f 100644
--- a/client/ayon_core/lib/local_settings.py
+++ b/client/ayon_core/lib/local_settings.py
@@ -26,8 +26,7 @@
 except ImportError:
 import six
 import appdirs
-
-from ayon_core.client import get_ayon_server_api_connection
+import ayon_api
 _PLACEHOLDER = object()
@@ -525,7 +524,7 @@ def get_ayon_appdirs(*args):
 def get_local_site_id():
 """Get local site identifier.
- Identifier is created if does not exists yet.
+ Identifier is created if does not exist yet.
 """
 # used for background syncing
 site_id = os.environ.get("AYON_SITE_ID")
@@ -556,10 +555,9 @@ def get_ayon_username():
 Returns:
 str: Username.
- """ - con = get_ayon_server_api_connection() - return con.get_user()["name"] + """ + return ayon_api.get_user()["name"] def get_openpype_username(): diff --git a/client/ayon_core/lib/path_templates.py b/client/ayon_core/lib/path_templates.py index 9be1736abf..a766dbd9c1 100644 --- a/client/ayon_core/lib/path_templates.py +++ b/client/ayon_core/lib/path_templates.py @@ -1,8 +1,6 @@ import os import re -import copy import numbers -import collections import six @@ -12,44 +10,6 @@ SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)") OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?") -def merge_dict(main_dict, enhance_dict): - """Merges dictionaries by keys. - - Function call itself if value on key is again dictionary. - - Args: - main_dict (dict): First dict to merge second one into. - enhance_dict (dict): Second dict to be merged. - - Returns: - dict: Merged result. - - .. note:: does not overrides whole value on first found key - but only values differences from enhance_dict - - """ - for key, value in enhance_dict.items(): - if key not in main_dict: - main_dict[key] = value - elif isinstance(value, dict) and isinstance(main_dict[key], dict): - main_dict[key] = merge_dict(main_dict[key], value) - else: - main_dict[key] = value - return main_dict - - -class TemplateMissingKey(Exception): - """Exception for cases when key does not exist in template.""" - - msg = "Template key does not exist: `{}`." - - def __init__(self, parents): - parent_join = "".join(["[\"{0}\"]".format(key) for key in parents]) - super(TemplateMissingKey, self).__init__( - self.msg.format(parent_join) - ) - - class TemplateUnsolved(Exception): """Exception for unsolved template when strict is set to True.""" @@ -142,7 +102,7 @@ class StringTemplate(object): """ Figure out with whole formatting. Separate advanced keys (*Like '{project[name]}') from string which must - be formatted separatelly in case of missing or incomplete keys in data. + be formatted separately in case of missing or incomplete keys in data. Args: data (dict): Containing keys to be filled into template. @@ -240,137 +200,6 @@ class StringTemplate(object): new_parts.extend(tmp_parts[idx]) return new_parts - -class TemplatesDict(object): - def __init__(self, templates=None): - self._raw_templates = None - self._templates = None - self._objected_templates = None - self.set_templates(templates) - - def set_templates(self, templates): - if templates is None: - self._raw_templates = None - self._templates = None - self._objected_templates = None - elif isinstance(templates, dict): - self._raw_templates = copy.deepcopy(templates) - self._templates = templates - self._objected_templates = self.create_objected_templates( - templates) - else: - raise TypeError("<{}> argument must be a dict, not {}.".format( - self.__class__.__name__, str(type(templates)) - )) - - def __getitem__(self, key): - return self.objected_templates[key] - - def get(self, key, *args, **kwargs): - return self.objected_templates.get(key, *args, **kwargs) - - @property - def raw_templates(self): - return self._raw_templates - - @property - def templates(self): - return self._templates - - @property - def objected_templates(self): - return self._objected_templates - - def _create_template_object(self, template): - """Create template object from a template string. - - Separated into method to give option change class of templates. - - Args: - template (str): Template string. - - Returns: - StringTemplate: Object of template. 
- """ - - return StringTemplate(template) - - def create_objected_templates(self, templates): - if not isinstance(templates, dict): - raise TypeError("Expected dict object, got {}".format( - str(type(templates)) - )) - - objected_templates = copy.deepcopy(templates) - inner_queue = collections.deque() - inner_queue.append(objected_templates) - while inner_queue: - item = inner_queue.popleft() - if not isinstance(item, dict): - continue - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, six.string_types): - item[key] = self._create_template_object(value) - elif isinstance(value, dict): - inner_queue.append(value) - return objected_templates - - def _format_value(self, value, data): - if isinstance(value, StringTemplate): - return value.format(data) - - if isinstance(value, dict): - return self._solve_dict(value, data) - return value - - def _solve_dict(self, templates, data): - """ Solves templates with entered data. - - Args: - templates (dict): All templates which will be formatted. - data (dict): Containing keys to be filled into template. - - Returns: - dict: With `TemplateResult` in values containing filled or - partially filled templates. - """ - output = collections.defaultdict(dict) - for key, value in templates.items(): - output[key] = self._format_value(value, data) - - return output - - def format(self, in_data, only_keys=True, strict=True): - """ Solves templates based on entered data. - - Args: - data (dict): Containing keys to be filled into template. - only_keys (bool, optional): Decides if environ will be used to - fill templates or only keys in data. - - Returns: - TemplatesResultDict: Output `TemplateResult` have `strict` - attribute set to True so accessing unfilled keys in templates - will raise exceptions with explaned error. - """ - # Create a copy of inserted data - data = copy.deepcopy(in_data) - - # Add environment variable to data - if only_keys is False: - for key, val in os.environ.items(): - env_key = "$" + key - if env_key not in data: - data[env_key] = val - - solved = self._solve_dict(self.objected_templates, data) - - output = TemplatesResultDict(solved) - output.strict = strict - return output - - class TemplateResult(str): """Result of template format with most of information in. @@ -379,8 +208,8 @@ class TemplateResult(str): only used keys. solved (bool): For check if all required keys were filled. template (str): Original template. - missing_keys (list): Missing keys that were not in the data. Include - missing optional keys. + missing_keys (Iterable[str]): Missing keys that were not in the data. + Include missing optional keys. invalid_types (dict): When key was found in data, but value had not allowed DataType. Allowed data types are `numbers`, `str`(`basestring`) and `dict`. 
Dictionary may cause invalid type @@ -445,99 +274,6 @@ class TemplateResult(str): ) -class TemplatesResultDict(dict): - """Holds and wrap TemplateResults for easy bug report.""" - - def __init__(self, in_data, key=None, parent=None, strict=None): - super(TemplatesResultDict, self).__init__() - for _key, _value in in_data.items(): - if isinstance(_value, dict): - _value = self.__class__(_value, _key, self) - self[_key] = _value - - self.key = key - self.parent = parent - self.strict = strict - if self.parent is None and strict is None: - self.strict = True - - def __getitem__(self, key): - if key not in self.keys(): - hier = self.hierarchy() - hier.append(key) - raise TemplateMissingKey(hier) - - value = super(TemplatesResultDict, self).__getitem__(key) - if isinstance(value, self.__class__): - return value - - # Raise exception when expected solved templates and it is not. - if self.raise_on_unsolved and hasattr(value, "validate"): - value.validate() - return value - - @property - def raise_on_unsolved(self): - """To affect this change `strict` attribute.""" - if self.strict is not None: - return self.strict - return self.parent.raise_on_unsolved - - def hierarchy(self): - """Return dictionary keys one by one to root parent.""" - if self.parent is None: - return [] - - hier_keys = [] - par_hier = self.parent.hierarchy() - if par_hier: - hier_keys.extend(par_hier) - hier_keys.append(self.key) - - return hier_keys - - @property - def missing_keys(self): - """Return missing keys of all children templates.""" - missing_keys = set() - for value in self.values(): - missing_keys |= value.missing_keys - return missing_keys - - @property - def invalid_types(self): - """Return invalid types of all children templates.""" - invalid_types = {} - for value in self.values(): - invalid_types = merge_dict(invalid_types, value.invalid_types) - return invalid_types - - @property - def used_values(self): - """Return used values for all children templates.""" - used_values = {} - for value in self.values(): - used_values = merge_dict(used_values, value.used_values) - return used_values - - def get_solved(self): - """Get only solved key from templates.""" - result = {} - for key, value in self.items(): - if isinstance(value, self.__class__): - value = value.get_solved() - if not value: - continue - result[key] = value - - elif ( - not hasattr(value, "solved") or - value.solved - ): - result[key] = value - return self.__class__(result, key=self.key, parent=self.parent) - - class TemplatePartResult: """Result to store result of template parts.""" def __init__(self, optional=False): diff --git a/client/ayon_core/lib/path_tools.py b/client/ayon_core/lib/path_tools.py index fec6a0c47d..a65f0f8e13 100644 --- a/client/ayon_core/lib/path_tools.py +++ b/client/ayon_core/lib/path_tools.py @@ -78,7 +78,7 @@ def collect_frames(files): files(list) or (set with single value): list of source paths Returns: - (dict): {'/asset/subset_v001.0001.png': '0001', ....} + dict: {'/folder/product_v001.0001.png': '0001', ....} """ patterns = [clique.PATTERNS["frames"]] diff --git a/client/ayon_core/lib/plugin_tools.py b/client/ayon_core/lib/plugin_tools.py index 5ad4da88b9..654bc7ac4a 100644 --- a/client/ayon_core/lib/plugin_tools.py +++ b/client/ayon_core/lib/plugin_tools.py @@ -94,8 +94,12 @@ def prepare_template_data(fill_pairs): output = {} for item in valid_items: keys, value = item - upper_value = value.upper() - capitalized_value = _capitalize_value(value) + # Convert only string values + if isinstance(value, str): + upper_value 
= value.upper() + capitalized_value = _capitalize_value(value) + else: + upper_value = capitalized_value = value first_key = keys.pop(0) if not keys: diff --git a/client/ayon_core/lib/python_module_tools.py b/client/ayon_core/lib/python_module_tools.py index 4f9eb7f667..cb6e4c14c4 100644 --- a/client/ayon_core/lib/python_module_tools.py +++ b/client/ayon_core/lib/python_module_tools.py @@ -118,8 +118,8 @@ def classes_from_module(superclass, module): Arguments: superclass (superclass): Superclass of subclasses to look for - module (types.ModuleType): Imported module from which to - parse valid Avalon plug-ins. + module (types.ModuleType): Imported module where to look for + 'superclass' subclasses. Returns: List of plug-ins, or empty list if none is found. diff --git a/client/ayon_core/lib/terminal.py b/client/ayon_core/lib/terminal.py index f822a37286..10fcc79a27 100644 --- a/client/ayon_core/lib/terminal.py +++ b/client/ayon_core/lib/terminal.py @@ -1,15 +1,5 @@ # -*- coding: utf-8 -*- """Package helping with colorizing and formatting terminal output.""" -# :: -# //. ... .. ///. //. -# ///\\\ \\\ \\ ///\\\ /// -# /// \\ \\\ \\ /// \\ /// // -# \\\ // \\\ // \\\ // \\\// ./ -# \\\// \\\// \\\// \\\' // -# \\\ \\\ \\\ \\\// -# ''' ''' ''' ''' -# ..---===[[ PyP3 Setup ]]===---... -# import re import time import threading @@ -69,7 +59,7 @@ class Terminal: Terminal.use_colors = False print( "Module `blessed` failed on import or terminal creation." - " Pype terminal won't use colors." + " AYON terminal won't use colors." ) Terminal._initialized = True return diff --git a/client/ayon_core/lib/transcoding.py b/client/ayon_core/lib/transcoding.py index 08e0bc9237..4d778c2091 100644 --- a/client/ayon_core/lib/transcoding.py +++ b/client/ayon_core/lib/transcoding.py @@ -45,7 +45,7 @@ ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$") IMAGE_EXTENSIONS = { ".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", - ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", + ".cal", ".cin", ".cpc", ".cpt", ".dds", ".dng", ".dpx", ".ecw", ".exr", ".fits", ".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer", ".icns", ".ico", ".cur", ".ics", ".ilbm", ".jbig", ".jbig2", ".jng", ".jpeg", ".jpeg-ls", ".jpeg-hdr", ".2000", ".jpg", diff --git a/client/ayon_core/modules/base.py b/client/ayon_core/modules/base.py index 8a78edf961..3f2a7d4ea5 100644 --- a/client/ayon_core/modules/base.py +++ b/client/ayon_core/modules/base.py @@ -1,3 +1,5 @@ +# Backwards compatibility support +# - TODO should be removed before release 1.0.0 from ayon_core.addon import ( AYONAddon, AddonsManager, @@ -12,3 +14,16 @@ from ayon_core.addon.base import ( ModulesManager = AddonsManager TrayModulesManager = TrayAddonsManager load_modules = load_addons + + +__all__ = ( + "AYONAddon", + "AddonsManager", + "TrayAddonsManager", + "load_addons", + "OpenPypeModule", + "OpenPypeAddOn", + "ModulesManager", + "TrayModulesManager", + "load_modules", +) diff --git a/client/ayon_core/modules/clockify/clockify_api.py b/client/ayon_core/modules/clockify/clockify_api.py index f8c9c537ee..2e1d8f008f 100644 --- a/client/ayon_core/modules/clockify/clockify_api.py +++ b/client/ayon_core/modules/clockify/clockify_api.py @@ -1,6 +1,4 @@ import os -import re -import time import json import datetime import requests diff --git a/client/ayon_core/modules/clockify/clockify_module.py b/client/ayon_core/modules/clockify/clockify_module.py index 58407bfe94..d2ee4f1e1e 100644 --- a/client/ayon_core/modules/clockify/clockify_module.py +++ 
b/client/ayon_core/modules/clockify/clockify_module.py @@ -3,7 +3,6 @@ import threading import time from ayon_core.modules import AYONAddon, ITrayModule, IPluginPaths -from ayon_core.client import get_asset_by_name from .constants import CLOCKIFY_FTRACK_USER_PATH, CLOCKIFY_FTRACK_SERVER_PATH @@ -255,33 +254,27 @@ class ClockifyModule(AYONAddon, ITrayModule, IPluginPaths): if not self.clockify_api.get_api_key(): return + project_name = input_data.get("project_name") + folder_path = input_data.get("folder_path") task_name = input_data.get("task_name") + task_type = input_data.get("task_type") + if not all((project_name, folder_path, task_name, task_type)): + return # Concatenate hierarchy and task to get description - description_items = list(input_data.get("hierarchy", [])) - description_items.append(task_name) - description = "/".join(description_items) + description = "/".join([folder_path.lstrip("/"), task_name]) # Check project existence - project_name = input_data.get("project_name") project_id = self._verify_project_exists(project_name) if not project_id: return # Setup timer tags - tag_ids = [] - tag_name = input_data.get("task_type") - if not tag_name: - # no task_type found in the input data - # if the timer is restarted by idle time (bug?) - asset_name = input_data["hierarchy"][-1] - asset_doc = get_asset_by_name(project_name, asset_name) - task_info = asset_doc["data"]["tasks"][task_name] - tag_name = task_info.get("type", "") - if not tag_name: - self.log.info("No tag information found for the timer") + if not task_type: + self.log.info("No tag information found for the timer") - task_tag_id = self.clockify_api.get_tag_id(tag_name) + tag_ids = [] + task_tag_id = self.clockify_api.get_tag_id(task_type) if task_tag_id is not None: tag_ids.append(task_tag_id) diff --git a/client/ayon_core/modules/clockify/ftrack/server/action_clockify_sync_server.py b/client/ayon_core/modules/clockify/ftrack/server/action_clockify_sync_server.py index 985cf49b97..7854f0ceba 100644 --- a/client/ayon_core/modules/clockify/ftrack/server/action_clockify_sync_server.py +++ b/client/ayon_core/modules/clockify/ftrack/server/action_clockify_sync_server.py @@ -11,7 +11,7 @@ class SyncClockifyServer(ServerAction): label = "Sync To Clockify (server)" description = "Synchronise data to Clockify workspace" - role_list = ["Pypeclub", "Administrator", "project Manager"] + role_list = ["Administrator", "project Manager"] def __init__(self, *args, **kwargs): super(SyncClockifyServer, self).__init__(*args, **kwargs) diff --git a/client/ayon_core/modules/clockify/ftrack/user/action_clockify_sync_local.py b/client/ayon_core/modules/clockify/ftrack/user/action_clockify_sync_local.py index 0e8cf6bd37..4701653a0b 100644 --- a/client/ayon_core/modules/clockify/ftrack/user/action_clockify_sync_local.py +++ b/client/ayon_core/modules/clockify/ftrack/user/action_clockify_sync_local.py @@ -13,7 +13,7 @@ class SyncClockifyLocal(BaseAction): #: Action description. 
description = 'Synchronise data to Clockify workspace' #: roles that are allowed to register this action - role_list = ["Pypeclub", "Administrator", "project Manager"] + role_list = ["Administrator", "project Manager"] #: icon icon = statics_icon("app_icons", "clockify-white.png") diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py index f7dd1772b0..8381c7d73e 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifyStart.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_asset_by_name +import ayon_api + from ayon_core.pipeline import LauncherAction from openpype_modules.clockify.clockify_api import ClockifyAPI @@ -10,35 +11,27 @@ class ClockifyStart(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - if "AYON_TASK_NAME" in session: - return True - return False + return selection.is_task_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() user_id = self.clockify_api.user_id workspace_id = self.clockify_api.workspace_id - project_name = session["AYON_PROJECT_NAME"] - asset_name = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] - description = asset_name + project_name = selection.project_name + folder_path = selection.folder_path + task_name = selection.task_name + description = "/".join([folder_path.lstrip("/"), task_name]) - # fetch asset docs - asset_doc = get_asset_by_name(project_name, asset_name) + # fetch folder entity + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) # get task type to fill the timer tag - task_info = asset_doc["data"]["tasks"][task_name] - task_type = task_info["type"] - - # check if the task has hierarchy and fill the - parents_data = asset_doc["data"] - if parents_data is not None: - description_items = parents_data.get("parents", []) - description_items.append(asset_name) - description_items.append(task_name) - description = "/".join(description_items) + task_type = task_entity["taskType"] project_id = self.clockify_api.get_project_id( project_name, workspace_id diff --git a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py index 5ef9033ffe..5388f47c98 100644 --- a/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py +++ b/client/ayon_core/modules/clockify/launcher_actions/ClockifySync.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_projects, get_project +import ayon_api + from openpype_modules.clockify.clockify_api import ClockifyAPI from ayon_core.pipeline import LauncherAction @@ -18,15 +19,18 @@ class ClockifySync(LauncherAction): order = 500 clockify_api = ClockifyAPI() - def is_compatible(self, session): + def is_compatible(self, selection): """Check if there's some projects to sync""" + if selection.is_project_selected: + return True + try: - next(get_projects()) + next(ayon_api.get_projects()) return True except StopIteration: return False - def process(self, session, **kwargs): + def process(self, selection, **kwargs): self.clockify_api.set_api() workspace_id = self.clockify_api.workspace_id user_id = 
self.clockify_api.user_id
@@ -36,18 +40,19 @@ class ClockifySync(LauncherAction):
             raise ClockifyPermissionsCheckFailed(
                 "Current Clockify user is missing permissions for this action!"
             )
-        project_name = session.get("AYON_PROJECT_NAME") or ""
-        projects_to_sync = []
-        if project_name.strip():
-            projects_to_sync = [get_project(project_name)]
+        if selection.is_project_selected:
+            projects_to_sync = [selection.project_entity]
         else:
-            projects_to_sync = get_projects()
+            projects_to_sync = ayon_api.get_projects()

-        projects_info = {}
-        for project in projects_to_sync:
-            task_types = project["config"]["tasks"].keys()
-            projects_info[project["name"]] = task_types
+        projects_info = {
+            project["name"]: {
+                task_type["name"]
+                for task_type in project["taskTypes"]
+            }
+            for project in projects_to_sync
+        }

         clockify_projects = self.clockify_api.get_projects(workspace_id)
         for project_name, task_types in projects_info.items():
diff --git a/client/ayon_core/modules/deadline/abstract_submit_deadline.py b/client/ayon_core/modules/deadline/abstract_submit_deadline.py
index b2da4d1398..2e0518ae20 100644
--- a/client/ayon_core/modules/deadline/abstract_submit_deadline.py
+++ b/client/ayon_core/modules/deadline/abstract_submit_deadline.py
@@ -29,6 +29,10 @@ from ayon_core.pipeline.publish.lib import (
 JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


+# TODO both 'requests_post' and 'requests_get' should not set 'verify' based
+# on environment variable. This should be done in a more controlled way,
+# e.g. each deadline url could have a checkbox to enable/disable
+# ssl verification.
 def requests_post(*args, **kwargs):
     """Wrap request post method.

diff --git a/client/ayon_core/modules/deadline/deadline_module.py b/client/ayon_core/modules/deadline/deadline_module.py
index 97d346c287..c0ba83477e 100644
--- a/client/ayon_core/modules/deadline/deadline_module.py
+++ b/client/ayon_core/modules/deadline/deadline_module.py
@@ -1,9 +1,10 @@
 import os
-import requests
-import six
 import sys

-from ayon_core.lib import requests_get, Logger
+import requests
+import six
+
+from ayon_core.lib import Logger
 from ayon_core.modules import AYONAddon, IPluginPaths
@@ -45,7 +46,6 @@ class DeadlineModule(AYONAddon, IPluginPaths):

     @staticmethod
     def get_deadline_pools(webservice, log=None):
-        # type: (str) -> list
         """Get pools from Deadline.
         Args:
             webservice (str): Server url.
@@ -56,6 +56,8 @@
             RuntimeError: If deadline webservice is unreachable.

         """
+        from .abstract_submit_deadline import requests_get
+
         if not log:
             log = Logger.get_logger(__name__)
diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py b/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py
index 8123409052..b7ca227b01 100644
--- a/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py
+++ b/client/ayon_core/modules/deadline/plugins/publish/collect_default_deadline_server.py
@@ -9,11 +9,11 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
     DL webservice addresses must be configured first in System Settings for
     project settings enum to work.

-    Default webservice could be overriden by
+    Default webservice could be overridden by
     `project_settings/deadline/deadline_servers`. Currently only a single url
     is expected.
- This url could be overriden by some hosts directly on instances with + This url could be overridden by some hosts directly on instances with `CollectDeadlineServerFromInstance`. """ diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py b/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py index 25951a56b6..6923c2b16b 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py +++ b/client/ayon_core/modules/deadline/plugins/publish/collect_pools.py @@ -31,14 +31,22 @@ class CollectDeadlinePools(pyblish.api.InstancePlugin, "harmony" "nuke", "maya", - "max"] + "max", + "houdini"] families = ["render", "rendering", "render.farm", "renderFarm", "renderlayer", - "maxrender"] + "maxrender", + "usdrender", + "redshift_rop", + "arnold_rop", + "mantra_rop", + "karma_rop", + "vray_rop", + "publish.hou"] primary_pool = None secondary_pool = None diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index a284464009..675346105c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -80,6 +80,8 @@ class AfterEffectsSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py index ae19e63a37..ab342c1a9d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_blender_deadline.py @@ -102,6 +102,8 @@ class BlenderSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py index bc3636da63..1fae23c9b2 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -74,6 +74,10 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + anatomy = instance.context.data["anatomy"] + publish_template = anatomy.get_template_item( + "publish", "default", "path" + ) for item in instance.context: if "workfile" in item.data["productType"]: msg = "Workfile (scene) must be published along" @@ -84,9 +88,9 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): template_data["representation"] = rep template_data["ext"] = rep template_data["comment"] = None - anatomy_filled = instance.context.data["anatomy"].format( - template_data) - template_filled = anatomy_filled["publish"]["path"] + template_filled = publish_template.format_strict( + template_data + ) script_path = os.path.normpath(template_filled) self.log.info( diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py 
b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py index 837ed91c60..e3a4cd8030 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_fusion_deadline.py @@ -103,17 +103,17 @@ class FusionSubmitDeadline( # Collect all saver instances in context that are to be rendered saver_instances = [] - for instance in context: - if instance.data["productType"] != "render": + for inst in context: + if inst.data["productType"] != "render": # Allow only saver family instances continue - if not instance.data.get("publish", True): + if not inst.data.get("publish", True): # Skip inactive instances continue - self.log.debug(instance.data["name"]) - saver_instances.append(instance) + self.log.debug(inst.data["name"]) + saver_instances.append(inst) if not saver_instances: raise RuntimeError("No instances found for Deadline submission") @@ -123,6 +123,10 @@ class FusionSubmitDeadline( script_path = context.data["currentFile"] + anatomy = instance.context.data["anatomy"] + publish_template = anatomy.get_template_item( + "publish", "default", "path" + ) for item in context: if "workfile" in item.data["families"]: msg = "Workfile (scene) must be published along" @@ -133,8 +137,9 @@ class FusionSubmitDeadline( template_data["representation"] = rep template_data["ext"] = rep template_data["comment"] = None - anatomy_filled = context.data["anatomy"].format(template_data) - template_filled = anatomy_filled["publish"]["path"] + template_filled = publish_template.format_strict( + template_data + ) script_path = os.path.normpath(template_filled) self.log.info( @@ -220,6 +225,8 @@ class FusionSubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py index beb8afc3a3..d52b16b27d 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -273,6 +273,8 @@ class HarmonySubmitDeadline( "FTRACK_API_KEY", "FTRACK_API_USER", "FTRACK_SERVER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py index 94e0947952..5826607ff4 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_cache_deadline.py @@ -45,9 +45,11 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline targets = ["local"] priority = 50 + chunk_size = 999999 + group = None jobInfo = {} pluginInfo = {} - group = None + def get_job_info(self): job_info = DeadlineJobInfo(Plugin="Houdini") @@ -88,7 +90,7 @@ class HoudiniCacheSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline attr_values = self.get_attr_values_from_data(instance.data) - job_info.ChunkSize = instance.data["chunkSize"] + job_info.ChunkSize = instance.data.get("chunk_size", self.chunk_size) job_info.Comment = context.data.get("comment") job_info.Priority = attr_values.get("priority", 
self.priority) job_info.Group = attr_values.get("group", self.group) diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 486fdfd634..6952604293 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -11,6 +11,7 @@ from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo from ayon_core.lib import ( is_in_tests, BoolDef, + TextDef, NumberDef ) @@ -79,16 +80,21 @@ class HoudiniSubmitDeadline( use_published = True # presets - priority = 50 - chunk_size = 1 export_priority = 50 export_chunk_size = 10 - group = "" export_group = "" - + priority = 50 + chunk_size = 1 + group = "" + @classmethod def get_attribute_defs(cls): return [ + BoolDef( + "suspend_publish", + default=False, + label="Suspend publish" + ), NumberDef( "priority", label="Priority", @@ -103,10 +109,15 @@ class HoudiniSubmitDeadline( minimum=1, maximum=1000 ), + TextDef( + "group", + default=cls.group, + label="Group Name" + ), NumberDef( "export_priority", label="Export Priority", - default=cls.priority, + default=cls.export_priority, decimals=0 ), NumberDef( @@ -117,11 +128,11 @@ class HoudiniSubmitDeadline( minimum=1, maximum=1000 ), - BoolDef( - "suspend_publish", - default=False, - label="Suspend publish" - ) + TextDef( + "export_group", + default=cls.export_group, + label="Export Group Name" + ), ] def get_job_info(self, dependency_job_ids=None): @@ -163,15 +174,6 @@ class HoudiniSubmitDeadline( job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) - if split_render_job and is_export_job: - job_info.Priority = attribute_values.get( - "export_priority", self.export_priority - ) - else: - job_info.Priority = attribute_values.get( - "priority", self.priority - ) - if is_in_tests(): job_info.BatchName += datetime.now().strftime("%d%m%Y%H%M%S") @@ -192,15 +194,23 @@ class HoudiniSubmitDeadline( job_info.Pool = instance.data.get("primaryPool") job_info.SecondaryPool = instance.data.get("secondaryPool") - job_info.Group = self.group + if split_render_job and is_export_job: + job_info.Priority = attribute_values.get( + "export_priority", self.export_priority + ) job_info.ChunkSize = attribute_values.get( "export_chunk", self.export_chunk_size ) + job_info.Group = self.export_group else: + job_info.Priority = attribute_values.get( + "priority", self.priority + ) job_info.ChunkSize = attribute_values.get( "chunk", self.chunk_size ) + job_info.Group = self.group job_info.Comment = context.data.get("comment") diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py index 1abefa515a..cba05f6948 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_max_deadline.py @@ -106,12 +106,14 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", "AYON_WORKDIR", "AYON_APP_NAME", - "IS_TEST" + "IS_TEST", ] environment = { diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py 
b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py index 0e871eb90e..0300b12104 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -207,6 +207,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, "FTRACK_API_USER", "FTRACK_SERVER", "OPENPYPE_SG_USER", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -651,7 +653,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline, return job_info, attr.asdict(plugin_info) def _get_arnold_render_payload(self, data): - from maya import cmds # Job Info job_info = copy.deepcopy(self.job_info) job_info.Name = self._job_info_label("Render") @@ -856,10 +857,10 @@ def _format_tiles( """ # Math used requires integers for correct output - as such # we ensure our inputs are correct. - assert type(tiles_x) is int, "tiles_x must be an integer" - assert type(tiles_y) is int, "tiles_y must be an integer" - assert type(width) is int, "width must be an integer" - assert type(height) is int, "height must be an integer" + assert isinstance(tiles_x, int), "tiles_x must be an integer" + assert isinstance(tiles_y, int), "tiles_y must be an integer" + assert isinstance(width, int), "width must be an integer" + assert isinstance(height, int), "height must be an integer" out = {"JobInfo": {}, "PluginInfo": {}} cfg = OrderedDict() diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py index a3111454b3..d70cb75bf3 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -196,6 +196,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, def _get_published_workfile_path(self, context): """This method is temporary while the class is not inherited from AbstractSubmitDeadline""" + anatomy = context.data["anatomy"] + # WARNING Hardcoded template name 'default' > may not be used + publish_template = anatomy.get_template_item( + "publish", "default", "path" + ) for instance in context: if ( instance.data["productType"] != "workfile" @@ -216,11 +221,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, template_data["ext"] = ext template_data["comment"] = None - anatomy = context.data["anatomy"] - # WARNING Hardcoded template name 'publish' > may not be used - template_obj = anatomy.templates_obj["publish"]["path"] - - template_filled = template_obj.format(template_data) + template_filled = publish_template.format(template_data) script_path = os.path.normpath(template_filled) self.log.info( "Using published scene for render {}".format( @@ -375,6 +376,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, keys = [ "PYTHONPATH", "PATH", + "AYON_BUNDLE_NAME", + "AYON_DEFAULT_SETTINGS_VARIANT", "AYON_PROJECT_NAME", "AYON_FOLDER_PATH", "AYON_TASK_NAME", @@ -387,7 +390,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin, "TOOL_ENV", "FOUNDRY_LICENSE", "OPENPYPE_SG_USER", - "AYON_BUNDLE_NAME", ] # add allowed keys from preset if any diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py index 3e95049e56..4e4657d886 100644 --- 
a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_cache_job.py @@ -4,13 +4,11 @@ import os import json import re from copy import deepcopy -import requests +import requests +import ayon_api import pyblish.api -from ayon_core.client import ( - get_last_version_by_subset_name, -) from ayon_core.pipeline import publish from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start @@ -69,7 +67,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -112,7 +110,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, output_dir = self._get_publish_folder( anatomy, deepcopy(instance.data["anatomyData"]), - instance.data.get("folderPath"), + instance.data.get("folderEntity"), instance.data["productName"], instance.context, instance.data["productType"], @@ -135,6 +133,9 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys @@ -382,7 +383,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, json.dump(publish_job, f, indent=4, sort_keys=True) def _get_publish_folder(self, anatomy, template_data, - asset, product_name, context, + folder_entity, product_name, context, product_type, version=None): """ Extracted logic to pre-calculate real publish folder, which is @@ -396,7 +397,7 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, Args: anatomy (ayon_core.pipeline.anatomy.Anatomy): template_data (dict): pre-calculated collected data for process - asset (str): asset name + folder_entity (dict[str, Any]): Folder entity. product_name (str): Product name (actually group name of product). 
product_type (str): for current deadline process it's always 'render' @@ -411,18 +412,22 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, """ project_name = context.data["projectName"] + host_name = context.data["hostName"] if not version: - version = get_last_version_by_subset_name( - project_name, - product_name, - asset_name=asset - ) - if version: - version = int(version["name"]) + 1 + version_entity = None + if folder_entity: + version_entity = ayon_api.get_last_version_by_product_name( + project_name, + product_name, + folder_entity["id"] + ) + + if version_entity: + version = int(version_entity["version"]) + 1 else: version = get_versioning_start( project_name, - template_data["app"], + host_name, task_name=template_data["task"]["name"], task_type=template_data["task"]["type"], product_type="render", @@ -430,7 +435,6 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, project_settings=context.data["project_settings"] ) - host_name = context.data["hostName"] task_info = template_data.get("task") or {} template_name = publish.get_publish_template_name( @@ -449,23 +453,10 @@ class ProcessSubmittedCacheJobOnFarm(pyblish.api.InstancePlugin, "type": product_type, } - render_templates = anatomy.templates_obj[template_name] - if "folder" in render_templates: - publish_folder = render_templates["folder"].format_strict( - template_data - ) - else: - # solve deprecated situation when `folder` key is not underneath - # `publish` anatomy - self.log.warning(( - "Deprecation warning: Anatomy does not have set `folder`" - " key underneath `publish` (in global of for project `{}`)." - ).format(project_name)) - - file_path = render_templates["path"].format_strict(template_data) - publish_folder = os.path.dirname(file_path) - - return publish_folder + render_dir_template = anatomy.get_template_item( + "publish", template_name, "directory" + ) + return render_dir_template.format_strict(template_data) @classmethod def get_attribute_defs(cls): diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py index 7bc13ae4b6..8def9cc63c 100644 --- a/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py +++ b/client/ayon_core/modules/deadline/plugins/publish/submit_publish_job.py @@ -4,14 +4,12 @@ import os import json import re from copy import deepcopy + import requests import clique - +import ayon_api import pyblish.api -from ayon_core.client import ( - get_last_version_by_subset_name, -) from ayon_core.pipeline import publish from ayon_core.lib import EnumDef, is_in_tests from ayon_core.pipeline.version_start import get_versioning_start @@ -132,7 +130,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", "KITSU_LOGIN", "KITSU_PWD" ] @@ -189,7 +187,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, output_dir = self._get_publish_folder( anatomy, deepcopy(instance.data["anatomyData"]), - instance.data.get("folderPath"), + instance.data.get("folderEntity"), instances[0]["productName"], instance.context, instances[0]["productType"], @@ -212,6 +210,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "AYON_RENDER_JOB": "0", "AYON_REMOTE_PUBLISH": "0", "AYON_BUNDLE_NAME": os.environ["AYON_BUNDLE_NAME"], + "AYON_DEFAULT_SETTINGS_VARIANT": ( + os.environ["AYON_DEFAULT_SETTINGS_VARIANT"] + ), } # add environments from self.environ_keys 
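Both publish-job plugins now resolve the next version the same way: ask the server for the last version of the product under the folder entity, and fall back to the configured versioning start when nothing was published yet. A sketch of that shared flow follows (the wrapper function is illustrative and some keyword arguments visible in the hunks are trimmed; the calls themselves are the ones used here):

    import ayon_api
    from ayon_core.pipeline.version_start import get_versioning_start

    def _resolve_next_version(
        project_name, product_name, folder_entity, context, template_data
    ):
        version_entity = None
        if folder_entity:
            # Only an existing folder can have published versions
            version_entity = ayon_api.get_last_version_by_product_name(
                project_name, product_name, folder_entity["id"]
            )
        if version_entity:
            # Continue after the last published version
            return int(version_entity["version"]) + 1
        # First publish of this product: use the configured start version
        return get_versioning_start(
            project_name,
            context.data["hostName"],
            task_name=template_data["task"]["name"],
            task_type=template_data["task"]["type"],
            product_type="render",
            project_settings=context.data["project_settings"]
        )
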
@@ -503,7 +504,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, json.dump(publish_job, f, indent=4, sort_keys=True) def _get_publish_folder(self, anatomy, template_data, - asset, product_name, context, + folder_entity, product_name, context, product_type, version=None): """ Extracted logic to pre-calculate real publish folder, which is @@ -517,7 +518,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, Args: anatomy (ayon_core.pipeline.anatomy.Anatomy): template_data (dict): pre-calculated collected data for process - asset (string): asset name + folder_entity (dict[str, Any]): Folder entity. product_name (string): Product name (actually group name of product) product_type (string): for current deadline process it's always @@ -535,13 +536,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, project_name = context.data["projectName"] host_name = context.data["hostName"] if not version: - version = get_last_version_by_subset_name( - project_name, - product_name, - asset_name=asset - ) - if version: - version = int(version["name"]) + 1 + version_entity = None + if folder_entity: + version_entity = ayon_api.get_last_version_by_product_name( + project_name, + product_name, + folder_entity["id"] + ) + + if version_entity: + version = int(version_entity["version"]) + 1 else: version = get_versioning_start( project_name, @@ -572,23 +576,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin, "type": product_type, } - render_templates = anatomy.templates_obj[template_name] - if "folder" in render_templates: - publish_folder = render_templates["folder"].format_strict( - template_data - ) - else: - # solve deprecated situation when `folder` key is not underneath - # `publish` anatomy - self.log.warning(( - "Deprecation warning: Anatomy does not have set `folder`" - " key underneath `publish` (in global of for project `{}`)." 
-            ).format(project_name))
-
-            file_path = render_templates["path"].format_strict(template_data)
-            publish_folder = os.path.dirname(file_path)
-
-        return publish_folder
+        render_dir_template = anatomy.get_template_item(
+            "publish", template_name, "directory"
+        )
+        return render_dir_template.format_strict(template_data)

     @classmethod
     def get_attribute_defs(cls):
diff --git a/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py b/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
index a666c5c2dc..6263526d5c 100644
--- a/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
+++ b/client/ayon_core/modules/deadline/plugins/publish/validate_expected_and_rendered_files.py
@@ -149,7 +149,7 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
         """
         # no frames in file name at all, eg 'renderCompositingMain.withLut.mov'
         if not frame_placeholder:
-            return set([file_name_template])
+            return {file_name_template}

         real_expected_rendered = set()
         src_padding_exp = "%0{}d".format(len(frame_placeholder))
diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
index de0a2c6d7a..bb7f932013 100644
--- a/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
+++ b/client/ayon_core/modules/deadline/repository/custom/plugins/Ayon/Ayon.py
@@ -7,7 +7,6 @@ from Deadline.Plugins import PluginType, DeadlinePlugin
 from Deadline.Scripting import (
     StringUtils,
     FileUtils,
-    DirectoryUtils,
     RepositoryUtils
 )
diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
index 1565b2c496..ac04407f5b 100644
--- a/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
+++ b/client/ayon_core/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py
@@ -12,9 +12,8 @@ from Deadline.Scripting import (
     RepositoryUtils,
     FileUtils,
     DirectoryUtils,
-    ProcessUtils,
 )

-__version__ = "1.0.1"
+__version__ = "1.1.0"
 VERSION_REGEX = re.compile(
     r"(?P<major>0|[1-9]\d*)"
     r"\.(?P<minor>0|[1-9]\d*)"
     r"\.(?P<patch>0|[1-9]\d*)"
 )
@@ -464,19 +463,13 @@ def inject_ayon_environment(deadlinePlugin):
         export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
         print(">>> Temporary path: {}".format(export_url))

-        args = [
-            "--headless",
-            "extractenvironments",
-            export_url
-        ]
-
         add_kwargs = {
             "envgroup": "farm",
         }
         # Support backwards compatible keys
         for key, env_keys in (
             ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]),
-            ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
+            ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]),
             ("task", ["AYON_TASK_NAME", "AVALON_TASK"]),
             ("app", ["AYON_APP_NAME", "AVALON_APP_NAME"]),
         ):
@@ -487,18 +480,37 @@
                     break
             add_kwargs[key] = value

-        if job.GetJobEnvironmentKeyValue("IS_TEST"):
-            args.append("--automatic-tests")
-
-        if all(add_kwargs.values()):
-            for key, value in add_kwargs.items():
-                args.extend(["--{}".format(key), value])
-        else:
+        if not all(add_kwargs.values()):
             raise RuntimeError((
                 "Missing required env vars: AYON_PROJECT_NAME,"
                 " AYON_FOLDER_PATH, AYON_TASK_NAME, AYON_APP_NAME"
             ))

+        # Use applications addon arguments
+        # TODO validate if applications addon should be used
+        args = [
+            "--headless",
+            "addon",
+            "applications",
+            "extractenvironments",
+            export_url
+        ]
+        # Backwards compatibility for older versions
+        
legacy_args = [ + "--headless", + "extractenvironments", + export_url + ] + if job.GetJobEnvironmentKeyValue("IS_TEST"): + args.append("--automatic-tests") + + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) + # Legacy arguments expect '--asset' instead of '--folder' + if key == "folder": + key = "asset" + legacy_args.extend(["--{}".format(key), value]) + environment = { "AYON_SERVER_URL": ayon_server_url, "AYON_API_KEY": ayon_api_key, @@ -517,9 +529,18 @@ def inject_ayon_environment(deadlinePlugin): ) if process_exitcode != 0: - raise RuntimeError( - "Failed to run Ayon process to extract environments." + print( + "Failed to run AYON process to extract environments. Trying" + " to use legacy arguments." ) + legacy_args_str = subprocess.list2cmdline(legacy_args) + process_exitcode = deadlinePlugin.RunProcess( + exe, legacy_args_str, os.path.dirname(exe), -1 + ) + if process_exitcode != 0: + raise RuntimeError( + "Failed to run AYON process to extract environments." + ) print(">>> Loading file ...") with open(export_url) as fp: diff --git a/client/ayon_core/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py b/client/ayon_core/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py index 9641c16d20..f146aef7b4 100644 --- a/client/ayon_core/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py +++ b/client/ayon_core/modules/deadline/repository/custom/plugins/OpenPypeTileAssembler/OpenPypeTileAssembler.py @@ -404,7 +404,7 @@ class OpenPypeTileAssembler(DeadlinePlugin): Args: output_width (int): Width of output image. output_height (int): Height of output image. - tiles_info (list): List of tile items, each item must be + tile_info (list): List of tile items, each item must be dictionary with `filepath`, `pos_x` and `pos_y` keys representing path to file and x, y coordinates on output image where top-left point of tile item should start. diff --git a/client/ayon_core/modules/job_queue/addon.py b/client/ayon_core/modules/job_queue/addon.py index 32d06d0040..0fa54eb2f0 100644 --- a/client/ayon_core/modules/job_queue/addon.py +++ b/client/ayon_core/modules/job_queue/addon.py @@ -168,7 +168,7 @@ class JobQueueAddon(AYONAddon): @classmethod def start_worker(cls, app_name, server_url=None): import requests - from ayon_core.lib import ApplicationManager + from ayon_applications import ApplicationManager if not server_url: server_url = cls.get_server_url_from_settings() diff --git a/client/ayon_core/modules/launcher_action.py b/client/ayon_core/modules/launcher_action.py index 1faf6ef4b1..38e88d36ca 100644 --- a/client/ayon_core/modules/launcher_action.py +++ b/client/ayon_core/modules/launcher_action.py @@ -37,20 +37,6 @@ class LauncherAction(AYONAddon, ITrayAction): if path and os.path.exists(path): register_launcher_action_path(path) - paths_str = os.environ.get("AVALON_ACTIONS") or "" - if paths_str: - self.log.warning( - "WARNING: 'AVALON_ACTIONS' is deprecated. Support of this" - " environment variable will be removed in future versions." - " Please consider using 'OpenPypeModule' to define custom" - " action paths. Planned version to drop the support" - " is 3.17.2 or 3.18.0 ." - ) - - for path in paths_str.split(os.pathsep): - if path and os.path.exists(path): - register_launcher_action_path(path) - def on_action_trigger(self): """Implementation for ITrayAction interface. 
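One behavioural detail of the GlobalJobPreLoad bump to 1.1.0 is worth restating: environment extraction is attempted with the new `addon applications extractenvironments` arguments first, and only falls back to the legacy `extractenvironments` command when that process fails. Roughly, assuming `run_process` stands in for `deadlinePlugin.RunProcess` and returns the exit code, the flow is:

    import subprocess

    def extract_environments(run_process, exe, args, legacy_args, cwd):
        # Prefer the applications addon sub-command (newer AYON bundles)
        exit_code = run_process(exe, subprocess.list2cmdline(args), cwd, -1)
        if exit_code != 0:
            # Older bundles only understand the global command
            print(
                "Failed to run AYON process to extract environments."
                " Trying to use legacy arguments."
            )
            exit_code = run_process(
                exe, subprocess.list2cmdline(legacy_args), cwd, -1
            )
        if exit_code != 0:
            raise RuntimeError(
                "Failed to run AYON process to extract environments."
            )
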
diff --git a/client/ayon_core/modules/loader_action.py b/client/ayon_core/modules/loader_action.py
index a0cc417b66..1e45db05dc 100644
--- a/client/ayon_core/modules/loader_action.py
+++ b/client/ayon_core/modules/loader_action.py
@@ -13,7 +13,7 @@ class LoaderAddon(AYONAddon, ITrayAddon):
         # Add library tool
         self._loader_imported = False
         try:
-            from ayon_core.tools.loader.ui import LoaderWindow
+            from ayon_core.tools.loader.ui import LoaderWindow  # noqa: F401

             self._loader_imported = True
         except Exception:
diff --git a/client/ayon_core/modules/royalrender/api.py b/client/ayon_core/modules/royalrender/api.py
index cd72014a42..a69f88c43c 100644
--- a/client/ayon_core/modules/royalrender/api.py
+++ b/client/ayon_core/modules/royalrender/api.py
@@ -1,13 +1,14 @@
 # -*- coding: utf-8 -*-
 """Wrapper around Royal Render API."""
-import sys
 import os
+import sys

-from ayon_core.lib.local_settings import AYONSettingsRegistry
-from ayon_core.lib import Logger, run_subprocess
-from .rr_job import RRJob, SubmitFile, SubmitterParameter
+from ayon_core.lib import Logger, run_subprocess, AYONSettingsRegistry
 from ayon_core.lib.vendor_bin_utils import find_tool_in_custom_paths

+from .rr_job import SubmitFile
+from .rr_job import RRJob, SubmitterParameter  # noqa: F401
+

 class Api:
diff --git a/client/ayon_core/modules/royalrender/lib.py b/client/ayon_core/modules/royalrender/lib.py
index d552e7fb19..82bc96e759 100644
--- a/client/ayon_core/modules/royalrender/lib.py
+++ b/client/ayon_core/modules/royalrender/lib.py
@@ -2,7 +2,6 @@
 """Submitting render job to RoyalRender."""
 import os
 import json
-import platform
 import re
 import tempfile
 import uuid
@@ -309,31 +308,45 @@ class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,
         export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
         print(">>> Temporary path: {}".format(export_url))

-        args = [
-            "--headless",
-            "extractenvironments",
-            export_url
-        ]
-
         anatomy_data = instance.context.data["anatomyData"]

+        addons_manager = instance.context.data["ayonAddonsManager"]
+        applications_addon = addons_manager.get_enabled_addon("applications")
+
+        folder_key = "folder"
+        if applications_addon is None:
+            # Use 'asset' when applications addon command is not used
+            folder_key = "asset"
         add_kwargs = {
             "project": anatomy_data["project"]["name"],
-            "asset": instance.context.data["folderPath"],
+            folder_key: instance.context.data["folderPath"],
             "task": anatomy_data["task"]["name"],
             "app": instance.context.data.get("appName"),
             "envgroup": "farm"
         }

-        if os.getenv('IS_TEST'):
-            args.append("--automatic-tests")
-
         if not all(add_kwargs.values()):
             raise RuntimeError((
                 "Missing required env vars: AYON_PROJECT_NAME, AYON_FOLDER_PATH,"
                 " AYON_TASK_NAME, AYON_APP_NAME"
             ))

+        args = ["--headless"]
+        # Use applications addon to extract environments
+        # NOTE this is for backwards compatibility, the global command
+        # will be removed in the future and only the applications addon
+        # command should be used.
+ if applications_addon is not None: + args.extend(["addon", "applications"]) + + args.extend([ + "extractenvironments", + export_url + ]) + + if os.getenv('IS_TEST'): + args.append("--automatic-tests") + for key, value in add_kwargs.items(): args.extend([f"--{key}", value]) self.log.debug("Executing: {}".format(" ".join(args))) diff --git a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py index 5d177fec07..51500f84f5 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/create_publish_royalrender_job.py @@ -3,7 +3,6 @@ import os import attr import json -import re import pyblish.api @@ -65,7 +64,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, "FTRACK_SERVER", "AYON_APP_NAME", "AYON_USERNAME", - "OPENPYPE_SG_USER", + "AYON_SG_USERNAME", ] priority = 50 @@ -198,7 +197,7 @@ class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin, priority = self.priority or instance.data.get("priority", 50) - # rr requires absolut path or all jobs won't show up in rControl + # rr requires absolute path or all jobs won't show up in rrControl abs_metadata_path = self.anatomy.fill_root(rootless_metadata_path) # command line set in E01__OpenPype__PublishJob.cfg, here only diff --git a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py index 54de943428..09c1dc4a54 100644 --- a/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py +++ b/client/ayon_core/modules/royalrender/plugins/publish/submit_jobs_to_royalrender.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Submit jobs to RoyalRender.""" import tempfile -import platform import pyblish.api from ayon_core.modules.royalrender.api import ( diff --git a/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py b/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py index 778052778f..8405f69b3e 100644 --- a/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py +++ b/client/ayon_core/modules/royalrender/rr_root/plugins/control_job/perjob/m50__openpype_publish_render.py @@ -109,7 +109,7 @@ class OpenPypeContextSelector: if not self.context or \ not self.context.get("project") or \ - not self.context.get("asset") or \ + not self.context.get("folder") or \ not self.context.get("task"): self._show_rr_warning("Context selection failed.") return False @@ -137,7 +137,7 @@ class OpenPypeContextSelector: def run_publish(self): """Run publish process.""" env = {"AYON_PROJECT_NAME": str(self.context.get("project")), - "AYON_FOLDER_PATH": str(self.context.get("asset")), + "AYON_FOLDER_PATH": str(self.context.get("folder")), "AYON_TASK_NAME": str(self.context.get("task")), # "AYON_APP_NAME": str(self.context.get("app_name")) } @@ -184,7 +184,7 @@ selector = OpenPypeContextSelector() # try to set context from environment for key, env_keys in ( ("project", ["AYON_PROJECT_NAME", "AVALON_PROJECT"]), - ("asset", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), + ("folder", ["AYON_FOLDER_PATH", "AVALON_ASSET"]), ("task", ["AYON_TASK_NAME", "AVALON_TASK"]), # ("app_name", ["AYON_APP_NAME", "AVALON_APP_NAME"]) ): diff --git 
a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py index 4e94603aa9..b402d4034a 100644 --- a/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py +++ b/client/ayon_core/modules/timers_manager/launch_hooks/post_start_timer.py @@ -1,4 +1,4 @@ -from ayon_core.lib.applications import PostLaunchHook, LaunchTypes +from ayon_applications import PostLaunchHook, LaunchTypes class PostStartTimerHook(PostLaunchHook): @@ -11,13 +11,13 @@ class PostStartTimerHook(PostLaunchHook): def execute(self): project_name = self.data.get("project_name") - asset_name = self.data.get("folder_path") + folder_path = self.data.get("folder_path") task_name = self.data.get("task_name") missing_context_keys = set() if not project_name: missing_context_keys.add("project_name") - if not asset_name: + if not folder_path: missing_context_keys.add("folder_path") if not task_name: missing_context_keys.add("task_name") @@ -40,5 +40,5 @@ class PostStartTimerHook(PostLaunchHook): return timers_manager.start_timer_with_webserver( - project_name, asset_name, task_name, logger=self.log + project_name, folder_path, task_name, logger=self.log ) diff --git a/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py b/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py index 182efbc4ae..620cdb6e65 100644 --- a/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py +++ b/client/ayon_core/modules/timers_manager/plugins/publish/start_timer.py @@ -24,14 +24,14 @@ class StartTimer(pyblish.api.ContextPlugin): return project_name = context.data["projectName"] - asset_name = context.data.get("folderPath") + folder_path = context.data.get("folderPath") task_name = context.data.get("task") - if not project_name or not asset_name or not task_name: + if not project_name or not folder_path or not task_name: self.log.info(( "Current context does not contain all" " required information to start a timer." 
)) return timers_manager.start_timer_with_webserver( - project_name, asset_name, task_name, self.log + project_name, folder_path, task_name, self.log ) diff --git a/client/ayon_core/modules/timers_manager/rest_api.py b/client/ayon_core/modules/timers_manager/rest_api.py index c890d587de..88a6539510 100644 --- a/client/ayon_core/modules/timers_manager/rest_api.py +++ b/client/ayon_core/modules/timers_manager/rest_api.py @@ -45,7 +45,7 @@ class TimersManagerModuleRestApi: data = await request.json() try: project_name = data["project_name"] - asset_name = data["folder_path"] + folder_path = data["folder_path"] task_name = data["task_name"] except KeyError: msg = ( @@ -57,7 +57,7 @@ class TimersManagerModuleRestApi: self.module.stop_timers() try: - self.module.start_timer(project_name, asset_name, task_name) + self.module.start_timer(project_name, folder_path, task_name) except Exception as exc: return Response(status=404, message=str(exc)) @@ -70,9 +70,9 @@ class TimersManagerModuleRestApi: async def get_task_time(self, request): data = await request.json() try: - project_name = data['project_name'] - asset_name = data['folder_path'] - task_name = data['task_name'] + project_name = data["project_name"] + folder_path = data["folder_path"] + task_name = data["task_name"] except KeyError: message = ( "Payload must contain fields 'project_name, 'folder_path'," @@ -81,5 +81,5 @@ class TimersManagerModuleRestApi: self.log.warning(message) return Response(text=message, status=404) - time = self.module.get_task_time(project_name, asset_name, task_name) + time = self.module.get_task_time(project_name, folder_path, task_name) return Response(text=json.dumps(time)) diff --git a/client/ayon_core/modules/timers_manager/timers_manager.py b/client/ayon_core/modules/timers_manager/timers_manager.py index e04200525a..4212ff6b25 100644 --- a/client/ayon_core/modules/timers_manager/timers_manager.py +++ b/client/ayon_core/modules/timers_manager/timers_manager.py @@ -1,8 +1,8 @@ import os import platform +import ayon_api -from ayon_core.client import get_asset_by_name from ayon_core.addon import ( AYONAddon, ITrayService, @@ -24,14 +24,18 @@ class ExampleTimersManagerConnector: Required methods are 'stop_timer' and 'start_timer'. - # TODO pass asset document instead of `hierarchy` Example of `data` that are passed during changing timer: ``` data = { "project_name": project_name, + "folder_id": folder_id, + "folder_path": folder_entity["path"], "task_name": task_name, "task_type": task_type, - "hierarchy": hierarchy + # Deprecated + "asset_id": folder_id, + "asset_name": folder_entity["name"], + "hierarchy": hierarchy_items, } ``` """ @@ -176,16 +180,14 @@ class TimersManager( """Convert string path to a timer data. It is expected that first item is project name, last item is task name - and parent asset name is before task name. + and folder path in the middle. 
""" path_items = task_path.split("/") - if len(path_items) < 3: - raise InvalidContextError("Invalid path \"{}\"".format(task_path)) task_name = path_items.pop(-1) - asset_name = path_items.pop(-1) project_name = path_items.pop(0) + folder_path = "/" + "/".join(path_items) return self.get_timer_data_for_context( - project_name, asset_name, task_name, self.log + project_name, folder_path, task_name, self.log ) def get_launch_hook_paths(self): @@ -204,40 +206,38 @@ class TimersManager( @staticmethod def get_timer_data_for_context( - project_name, asset_name, task_name, logger=None + project_name, folder_path, task_name, logger=None ): - """Prepare data for timer related callbacks. - - TODO: - - return predefined object that has access to asset document etc. - """ - if not project_name or not asset_name or not task_name: + """Prepare data for timer related callbacks.""" + if not project_name or not folder_path or not task_name: raise InvalidContextError(( "Missing context information got" - " Project: \"{}\" Asset: \"{}\" Task: \"{}\"" - ).format(str(project_name), str(asset_name), str(task_name))) + " Project: \"{}\" Folder: \"{}\" Task: \"{}\"" + ).format(str(project_name), str(folder_path), str(task_name))) - asset_doc = get_asset_by_name( + folder_entity = ayon_api.get_folder_by_path( project_name, - asset_name, - fields=["_id", "name", "data.tasks", "data.parents"] + folder_path, + fields={"id", "name", "path"} ) - if not asset_doc: + if not folder_entity: raise InvalidContextError(( - "Asset \"{}\" not found in project \"{}\"" - ).format(asset_name, project_name)) + "Folder \"{}\" not found in project \"{}\"" + ).format(folder_path, project_name)) - asset_data = asset_doc.get("data") or {} - asset_tasks = asset_data.get("tasks") or {} - if task_name not in asset_tasks: + folder_id = folder_entity["id"] + task_entity = ayon_api.get_task_by_name( + project_name, folder_id, task_name + ) + if not task_entity: raise InvalidContextError(( - "Task \"{}\" not found on asset \"{}\" in project \"{}\"" - ).format(task_name, asset_name, project_name)) + "Task \"{}\" not found on folder \"{}\" in project \"{}\"" + ).format(task_name, folder_path, project_name)) task_type = "" try: - task_type = asset_tasks[task_name]["type"] + task_type = task_entity["taskType"] except KeyError: msg = "Couldn't find task_type for {}".format(task_name) if logger is not None: @@ -245,32 +245,34 @@ class TimersManager( else: print(msg) - hierarchy_items = asset_data.get("parents") or [] - hierarchy_items.append(asset_name) + hierarchy_items = folder_entity["path"].split("/") + hierarchy_items.pop(0) return { "project_name": project_name, - "asset_id": str(asset_doc["_id"]), - "asset_name": asset_name, + "folder_id": folder_id, + "folder_path": folder_entity["path"], "task_name": task_name, "task_type": task_type, - "hierarchy": hierarchy_items + "asset_id": folder_id, + "asset_name": folder_entity["name"], + "hierarchy": hierarchy_items, } - def start_timer(self, project_name, asset_name, task_name): + def start_timer(self, project_name, folder_path, task_name): """Start timer for passed context. Args: - project_name (str): Project name - asset_name (str): Asset name - task_name (str): Task name + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. 
""" data = self.get_timer_data_for_context( - project_name, asset_name, task_name, self.log + project_name, folder_path, task_name, self.log ) self.timer_started(None, data) - def get_task_time(self, project_name, asset_name, task_name): + def get_task_time(self, project_name, folder_path, task_name): """Get total time for passed context. TODO: @@ -281,7 +283,7 @@ class TimersManager( if hasattr(connector, "get_task_time"): module = self._modules_by_id[module_id] times[module.name] = connector.get_task_time( - project_name, asset_name, task_name + project_name, folder_path, task_name ) return times @@ -394,7 +396,7 @@ class TimersManager( @staticmethod def start_timer_with_webserver( - project_name, asset_name, task_name, logger=None + project_name, folder_path, task_name, logger=None ): """Prepared method for calling change timers on REST api. @@ -403,7 +405,7 @@ class TimersManager( Args: project_name (str): Project name. - asset_name (str): Asset name. + folder_path (str): Folder path. task_name (str): Task name. logger (logging.Logger): Logger object. Using 'print' if not passed. @@ -430,7 +432,7 @@ class TimersManager( return data = { "project_name": project_name, - "folder_path": asset_name, + "folder_path": folder_path, "task_name": task_name } @@ -472,13 +474,13 @@ class TimersManager( def _on_host_task_change(self, event): project_name = event["project_name"] - asset_name = event["folder_path"] + folder_path = event["folder_path"] task_name = event["task_name"] self.log.debug(( "Sending message that timer should change to" - " Project: {} Asset: {} Task: {}" - ).format(project_name, asset_name, task_name)) + " Project: {} Folder: {} Task: {}" + ).format(project_name, folder_path, task_name)) self.start_timer_with_webserver( - project_name, asset_name, task_name, self.log + project_name, folder_path, task_name, self.log ) diff --git a/client/ayon_core/pipeline/__init__.py b/client/ayon_core/pipeline/__init__.py index 679e9a195e..d1a181a353 100644 --- a/client/ayon_core/pipeline/__init__.py +++ b/client/ayon_core/pipeline/__init__.py @@ -31,7 +31,7 @@ from .load import ( HeroVersionType, IncompatibleLoaderError, LoaderPlugin, - SubsetLoaderPlugin, + ProductLoaderPlugin, discover_loader_plugins, register_loader_plugin, @@ -94,7 +94,7 @@ from .context_tools import ( get_current_context, get_current_host_name, get_current_project_name, - get_current_asset_name, + get_current_folder_path, get_current_task_name ) install = install_host @@ -136,7 +136,7 @@ __all__ = ( "HeroVersionType", "IncompatibleLoaderError", "LoaderPlugin", - "SubsetLoaderPlugin", + "ProductLoaderPlugin", "discover_loader_plugins", "register_loader_plugin", @@ -195,7 +195,7 @@ __all__ = ( "get_current_context", "get_current_host_name", "get_current_project_name", - "get_current_asset_name", + "get_current_folder_path", "get_current_task_name", # Backwards compatible function names diff --git a/client/ayon_core/pipeline/actions.py b/client/ayon_core/pipeline/actions.py index 8e0ce7e583..eae2fc94b5 100644 --- a/client/ayon_core/pipeline/actions.py +++ b/client/ayon_core/pipeline/actions.py @@ -1,4 +1,8 @@ import logging +import warnings + +import ayon_api + from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -10,6 +14,288 @@ from ayon_core.pipeline.plugin_discover import ( from .load.utils import get_representation_path_from_context +class LauncherActionSelection: + """Object helper to pass selection to actions. 
+ + Object support backwards compatibility for 'session' from OpenPype where + environment variable keys were used to define selection. + + Args: + project_name (str): Selected project name. + folder_id (str): Selected folder id. + task_id (str): Selected task id. + folder_path (Optional[str]): Selected folder path. + task_name (Optional[str]): Selected task name. + project_entity (Optional[dict[str, Any]]): Project entity. + folder_entity (Optional[dict[str, Any]]): Folder entity. + task_entity (Optional[dict[str, Any]]): Task entity. + + """ + def __init__( + self, + project_name, + folder_id, + task_id, + folder_path=None, + task_name=None, + project_entity=None, + folder_entity=None, + task_entity=None + ): + self._project_name = project_name + self._folder_id = folder_id + self._task_id = task_id + + self._folder_path = folder_path + self._task_name = task_name + + self._project_entity = project_entity + self._folder_entity = folder_entity + self._task_entity = task_entity + + def __getitem__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + if key in {"AYON_PROJECT_NAME", "AVALON_PROJECT"}: + return self.project_name + if key in {"AYON_FOLDER_PATH", "AVALON_ASSET"}: + return self.folder_path + if key in {"AYON_TASK_NAME", "AVALON_TASK"}: + return self.task_name + raise KeyError(f"Key: {key} not found") + + def __iter__(self): + for key in self.keys(): + yield key + + def __contains__(self, key): + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + # Fake missing keys check for backwards compatibility + if key in { + "AYON_PROJECT_NAME", + "AVALON_PROJECT", + }: + return self._project_name is not None + if key in { + "AYON_FOLDER_PATH", + "AVALON_ASSET", + }: + return self._folder_id is not None + if key in { + "AYON_TASK_NAME", + "AVALON_TASK", + }: + return self._task_id is not None + return False + + def get(self, key, default=None): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + warnings.warn( + ( + "Using deprecated access to selection data. Please use" + " attributes and methods" + " defined by 'LauncherActionSelection'." + ), + category=DeprecationWarning + ) + try: + return self[key] + except KeyError: + return default + + def items(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, value in ( + ("AYON_PROJECT_NAME", self.project_name), + ("AYON_FOLDER_PATH", self.folder_path), + ("AYON_TASK_NAME", self.task_name), + ): + if value is not None: + yield (key, value) + + def keys(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for key, _ in self.items(): + yield key + + def values(self): + """ + + Deprecated: + Added for backwards compatibility with older actions. + + """ + for _, value in self.items(): + yield value + + def get_project_name(self): + """Selected project name. + + Returns: + Union[str, None]: Selected project name. + + """ + return self._project_name + + def get_folder_id(self): + """Selected folder id. + + Returns: + Union[str, None]: Selected folder id. + + """ + return self._folder_id + + def get_folder_path(self): + """Selected folder path. + + Returns: + Union[str, None]: Selected folder path. 
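+
+        Example:
+            >>> # Value is illustrative; it is resolved lazily from the
+            >>> # folder entity when not passed to the constructor.
+            >>> selection.folder_path
+            '/assets/characters/hero'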
+ + """ + if self._folder_id is None: + return None + if self._folder_path is None: + self._folder_path = self.folder_entity["path"] + return self._folder_path + + def get_task_id(self): + """Selected task id. + + Returns: + Union[str, None]: Selected task id. + + """ + return self._task_id + + def get_task_name(self): + """Selected task name. + + Returns: + Union[str, None]: Selected task name. + + """ + if self._task_id is None: + return None + if self._task_name is None: + self._task_name = self.task_entity["name"] + return self._task_name + + def get_project_entity(self): + """Project entity for the selection. + + Returns: + Union[dict[str, Any], None]: Project entity. + + """ + if self._project_name is None: + return None + if self._project_entity is None: + self._project_entity = ayon_api.get_project(self._project_name) + return self._project_entity + + def get_folder_entity(self): + """Folder entity for the selection. + + Returns: + Union[dict[str, Any], None]: Folder entity. + + """ + if self._project_name is None or self._folder_id is None: + return None + if self._folder_entity is None: + self._folder_entity = ayon_api.get_folder_by_id( + self._project_name, self._folder_id + ) + return self._folder_entity + + def get_task_entity(self): + """Task entity for the selection. + + Returns: + Union[dict[str, Any], None]: Task entity. + + """ + if ( + self._project_name is None + or self._task_id is None + ): + return None + if self._task_entity is None: + self._task_entity = ayon_api.get_task_by_id( + self._project_name, self._task_id + ) + return self._task_entity + + @property + def is_project_selected(self): + """Return whether a project is selected. + + Returns: + bool: Whether a project is selected. + + """ + return self._project_name is not None + + @property + def is_folder_selected(self): + """Return whether a folder is selected. + + Returns: + bool: Whether a folder is selected. + + """ + return self._folder_id is not None + + @property + def is_task_selected(self): + """Return whether a task is selected. + + Returns: + bool: Whether a task is selected. + + """ + return self._task_id is not None + + project_name = property(get_project_name) + folder_id = property(get_folder_id) + task_id = property(get_task_id) + folder_path = property(get_folder_path) + task_name = property(get_task_name) + + project_entity = property(get_project_entity) + folder_entity = property(get_folder_entity) + task_entity = property(get_task_entity) + + class LauncherAction(object): """A custom action available""" name = None @@ -21,17 +307,23 @@ class LauncherAction(object): log = logging.getLogger("LauncherAction") log.propagate = True - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the class is compatible with the Session. Args: - session (dict[str, Union[str, None]]): Session data with - AYON_PROJECT_NAME, AYON_FOLDER_PATH and AYON_TASK_NAME. - """ + selection (LauncherActionSelection): Data with selection. + """ return True - def process(self, session, **kwargs): + def process(self, selection, **kwargs): + """Process the action. + + Args: + selection (LauncherActionSelection): Data with selection. + **kwargs: Additional arguments. 
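+
+        Example:
+            A subclass sketch; the action name and body are illustrative:
+
+            >>> class PrintContext(LauncherAction):
+            ...     name = "print_context"
+            ...
+            ...     def process(self, selection, **kwargs):
+            ...         print(selection.project_name, selection.folder_path)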
+ + """ pass diff --git a/client/ayon_core/pipeline/anatomy.py b/client/ayon_core/pipeline/anatomy.py deleted file mode 100644 index e7833a9a15..0000000000 --- a/client/ayon_core/pipeline/anatomy.py +++ /dev/null @@ -1,1476 +0,0 @@ -import os -import re -import copy -import platform -import collections -import numbers - -import six -import time - -from ayon_core.client import get_project, get_ayon_server_api_connection -from ayon_core.lib import Logger, get_local_site_id -from ayon_core.lib.path_templates import ( - TemplateUnsolved, - TemplateResult, - StringTemplate, - TemplatesDict, - FormatObject, -) -from ayon_core.addon import AddonsManager - -log = Logger.get_logger(__name__) - - -class ProjectNotSet(Exception): - """Exception raised when is created Anatomy without project name.""" - - -class RootCombinationError(Exception): - """This exception is raised when templates has combined root types.""" - - def __init__(self, roots): - joined_roots = ", ".join( - ["\"{}\"".format(_root) for _root in roots] - ) - # TODO better error message - msg = ( - "Combination of root with and" - " without root name in AnatomyTemplates. {}" - ).format(joined_roots) - - super(RootCombinationError, self).__init__(msg) - - -class BaseAnatomy(object): - """Anatomy module helps to keep project settings. - - Wraps key project specifications, AnatomyTemplates and Roots. - """ - root_key_regex = re.compile(r"{(root?[^}]+)}") - root_name_regex = re.compile(r"root\[([^]]+)\]") - - def __init__(self, project_doc, root_overrides=None): - project_name = project_doc["name"] - self.project_name = project_name - self.project_code = project_doc["data"]["code"] - - self._data = self._prepare_anatomy_data( - project_doc, root_overrides - ) - self._templates_obj = AnatomyTemplates(self) - self._roots_obj = Roots(self) - - # Anatomy used as dictionary - # - implemented only getters returning copy - def __getitem__(self, key): - return copy.deepcopy(self._data[key]) - - def get(self, key, default=None): - return copy.deepcopy(self._data).get(key, default) - - def keys(self): - return copy.deepcopy(self._data).keys() - - def values(self): - return copy.deepcopy(self._data).values() - - def items(self): - return copy.deepcopy(self._data).items() - - def _prepare_anatomy_data(self, project_doc, root_overrides): - """Prepare anatomy data for further processing. - - Method added to replace `{task}` with `{task[name]}` in templates. 
- """ - - anatomy_data = self._project_doc_to_anatomy_data(project_doc) - - self._apply_local_settings_on_anatomy_data( - anatomy_data, - root_overrides - ) - - return anatomy_data - - @property - def templates(self): - """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" - return self._templates_obj.templates - - @property - def templates_obj(self): - """Return `AnatomyTemplates` object of current Anatomy instance.""" - return self._templates_obj - - def format(self, *args, **kwargs): - """Wrap `format` method of Anatomy's `templates_obj`.""" - return self._templates_obj.format(*args, **kwargs) - - def format_all(self, *args, **kwargs): - """Wrap `format_all` method of Anatomy's `templates_obj`.""" - return self._templates_obj.format_all(*args, **kwargs) - - @property - def roots(self): - """Wrap `roots` property of Anatomy's `roots_obj`.""" - return self._roots_obj.roots - - @property - def roots_obj(self): - """Return `Roots` object of current Anatomy instance.""" - return self._roots_obj - - def root_environments(self): - """Return AYON_PROJECT_ROOT_* environments for current project.""" - return self._roots_obj.root_environments() - - def root_environmets_fill_data(self, template=None): - """Environment variable values in dictionary for rootless path. - - Args: - template (str): Template for environment variable key fill. - By default is set to `"${}"`. - """ - return self.roots_obj.root_environmets_fill_data(template) - - def find_root_template_from_path(self, *args, **kwargs): - """Wrapper for Roots `find_root_template_from_path`.""" - return self.roots_obj.find_root_template_from_path(*args, **kwargs) - - def path_remapper(self, *args, **kwargs): - """Wrapper for Roots `path_remapper`.""" - return self.roots_obj.path_remapper(*args, **kwargs) - - def all_root_paths(self): - """Wrapper for Roots `all_root_paths`.""" - return self.roots_obj.all_root_paths() - - def set_root_environments(self): - """Set AYON_PROJECT_ROOT_* environments for current project.""" - self._roots_obj.set_root_environments() - - def root_names(self): - """Return root names for current project.""" - return self.root_names_from_templates(self.templates) - - def _root_keys_from_templates(self, data): - """Extract root key from templates in data. - - Args: - data (dict): Data that may contain templates as string. - - Return: - set: Set of all root names from templates as strings. - - Output example: `{"root[work]", "root[publish]"}` - """ - - output = set() - if isinstance(data, dict): - for value in data.values(): - for root in self._root_keys_from_templates(value): - output.add(root) - - elif isinstance(data, str): - for group in re.findall(self.root_key_regex, data): - output.add(group) - - return output - - def root_value_for_template(self, template): - """Returns value of root key from template.""" - root_templates = [] - for group in re.findall(self.root_key_regex, template): - root_templates.append("{" + group + "}") - - if not root_templates: - return None - - return root_templates[0].format(**{"root": self.roots}) - - def root_names_from_templates(self, templates): - """Extract root names form anatomy templates. - - Returns None if values in templates contain only "{root}". - Empty list is returned if there is no "root" in templates. - Else returns all root names from templates in list. - - RootCombinationError is raised when templates contain both root types, - basic "{root}" and with root name specification "{root[work]}". 
- - Args: - templates (dict): Anatomy templates where roots are not filled. - - Return: - list/None: List of all root names from templates as strings when - multiroot setup is used, otherwise None is returned. - """ - roots = list(self._root_keys_from_templates(templates)) - # Return empty list if no roots found in templates - if not roots: - return roots - - # Raise exception when root keys have roots with and without root name. - # Invalid output example: ["root", "root[project]", "root[render]"] - if len(roots) > 1 and "root" in roots: - raise RootCombinationError(roots) - - # Return None if "root" without root name in templates - if len(roots) == 1 and roots[0] == "root": - return None - - names = set() - for root in roots: - for group in re.findall(self.root_name_regex, root): - names.add(group) - return list(names) - - def fill_root(self, template_path): - """Fill template path where is only "root" key unfilled. - - Args: - template_path (str): Path with "root" key in. - Example path: "{root}/projects/MyProject/Shot01/Lighting/..." - - Return: - str: formatted path - """ - # NOTE does not care if there are different keys than "root" - return template_path.format(**{"root": self.roots}) - - @classmethod - def fill_root_with_path(cls, rootless_path, root_path): - """Fill path without filled "root" key with passed path. - - This is helper to fill root with different directory path than anatomy - has defined no matter if is single or multiroot. - - Output path is same as input path if `rootless_path` does not contain - unfilled root key. - - Args: - rootless_path (str): Path without filled "root" key. Example: - "{root[work]}/MyProject/..." - root_path (str): What should replace root key in `rootless_path`. - - Returns: - str: Path with filled root. - """ - output = str(rootless_path) - for group in re.findall(cls.root_key_regex, rootless_path): - replacement = "{" + group + "}" - output = output.replace(replacement, root_path) - - return output - - def replace_root_with_env_key(self, filepath, template=None): - """Replace root of path with environment key. - - # Example: - ## Project with roots: - ``` - { - "nas": { - "windows": P:/projects", - ... - } - ... - } - ``` - - ## Entered filepath - "P:/projects/project/asset/task/animation_v001.ma" - - ## Entered template - "<{}>" - - ## Output - "/project/asset/task/animation_v001.ma" - - Args: - filepath (str): Full file path where root should be replaced. - template (str): Optional template for environment key. Must - have one index format key. - Default value if not entered: "${}" - - Returns: - str: Path where root is replaced with environment root key. - - Raise: - ValueError: When project's roots were not found in entered path. - """ - success, rootless_path = self.find_root_template_from_path(filepath) - if not success: - raise ValueError( - "{}: Project's roots were not found in path: {}".format( - self.project_name, filepath - ) - ) - - data = self.root_environmets_fill_data(template) - return rootless_path.format(**data) - - def _project_doc_to_anatomy_data(self, project_doc): - """Convert project document to anatomy data. - - Probably should fill missing keys and values. - """ - - output = copy.deepcopy(project_doc["config"]) - output["attributes"] = copy.deepcopy(project_doc["data"]) - - return output - - def _apply_local_settings_on_anatomy_data( - self, anatomy_data, root_overrides - ): - """Apply local settings on anatomy data. - - ATM local settings can modify project roots. 
Project name is required - as local settings have data stored data by project's name. - - Local settings override root values in this order: - 1.) Check if local settings contain overrides for default project and - apply it's values on roots if there are any. - 2.) If passed `project_name` is not None then check project specific - overrides in local settings for the project and apply it's value on - roots if there are any. - - NOTE: Root values of default project from local settings are always - applied if are set. - - Args: - anatomy_data (dict): Data for anatomy. - root_overrides (dict): Data of local settings. - """ - - # Skip processing if roots for current active site are not available in - # local settings - if not root_overrides: - return - - current_platform = platform.system().lower() - - root_data = anatomy_data["roots"] - for root_name, path in root_overrides.items(): - if root_name not in root_data: - continue - anatomy_data["roots"][root_name][current_platform] = ( - path - ) - - -class CacheItem: - """Helper to cache data. - - Helper does not handle refresh of data and does not mark data as outdated. - Who uses the object should check of outdated state on his own will. - """ - - default_lifetime = 10 - - def __init__(self, lifetime=None): - self._data = None - self._cached = None - self._lifetime = lifetime or self.default_lifetime - - @property - def data(self): - """Cached data/object. - - Returns: - Any: Whatever was cached. - """ - - return self._data - - @property - def is_outdated(self): - """Item has outdated cache. - - Lifetime of cache item expired or was not yet set. - - Returns: - bool: Item is outdated. - """ - - if self._cached is None: - return True - return (time.time() - self._cached) > self._lifetime - - def update_data(self, data): - """Update cache of data. - - Args: - data (Any): Data to cache. - """ - - self._data = data - self._cached = time.time() - - -class Anatomy(BaseAnatomy): - _sync_server_addon_cache = CacheItem() - _project_cache = collections.defaultdict(CacheItem) - _default_site_id_cache = collections.defaultdict(CacheItem) - _root_overrides_cache = collections.defaultdict( - lambda: collections.defaultdict(CacheItem) - ) - - def __init__(self, project_name=None, site_name=None): - if not project_name: - project_name = os.environ.get("AYON_PROJECT_NAME") - - if not project_name: - raise ProjectNotSet(( - "Implementation bug: Project name is not set. Anatomy requires" - " to load data for specific project." - )) - - project_doc = self.get_project_doc_from_cache(project_name) - root_overrides = self._get_site_root_overrides(project_name, site_name) - - super(Anatomy, self).__init__(project_doc, root_overrides) - - @classmethod - def get_project_doc_from_cache(cls, project_name): - project_cache = cls._project_cache[project_name] - if project_cache.is_outdated: - project_cache.update_data(get_project(project_name)) - return copy.deepcopy(project_cache.data) - - @classmethod - def get_sync_server_addon(cls): - if cls._sync_server_addon_cache.is_outdated: - manager = AddonsManager() - cls._sync_server_addon_cache.update_data( - manager.get_enabled_addon("sync_server") - ) - return cls._sync_server_addon_cache.data - - @classmethod - def _get_studio_roots_overrides(cls, project_name): - """This would return 'studio' site override by local settings. - - Notes: - This logic handles local overrides of studio site which may be - available even when sync server is not enabled. 
- Handling of 'studio' and 'local' site was separated as preparation - for AYON development where that will be received from - separated sources. - - Args: - project_name (str): Name of project. - - Returns: - Union[Dict[str, str], None]): Local root overrides. - """ - if not project_name: - return - con = get_ayon_server_api_connection() - return con.get_project_roots_for_site( - project_name, get_local_site_id() - ) - - @classmethod - def _get_site_root_overrides(cls, project_name, site_name): - """Get root overrides for site. - - Args: - project_name (str): Project name for which root overrides should be - received. - site_name (Union[str, None]): Name of site for which root overrides - should be returned. - """ - - # First check if sync server is available and enabled - sync_server = cls.get_sync_server_addon() - if sync_server is None or not sync_server.enabled: - # QUESTION is ok to force 'studio' when site sync is not enabled? - site_name = "studio" - - elif not site_name: - # Use sync server to receive active site name - project_cache = cls._default_site_id_cache[project_name] - if project_cache.is_outdated: - project_cache.update_data( - sync_server.get_active_site_type(project_name) - ) - site_name = project_cache.data - - site_cache = cls._root_overrides_cache[project_name][site_name] - if site_cache.is_outdated: - if site_name == "studio": - # Handle studio root overrides without sync server - # - studio root overrides can be done even without sync server - roots_overrides = cls._get_studio_roots_overrides( - project_name - ) - else: - # Ask sync server to get roots overrides - roots_overrides = sync_server.get_site_root_overrides( - project_name, site_name - ) - site_cache.update_data(roots_overrides) - return site_cache.data - - -class AnatomyTemplateUnsolved(TemplateUnsolved): - """Exception for unsolved template when strict is set to True.""" - - msg = "Anatomy template \"{0}\" is unsolved.{1}{2}" - - -class AnatomyTemplateResult(TemplateResult): - rootless = None - - def __new__(cls, result, rootless_path): - new_obj = super(AnatomyTemplateResult, cls).__new__( - cls, - str(result), - result.template, - result.solved, - result.used_values, - result.missing_keys, - result.invalid_types - ) - new_obj.rootless = rootless_path - return new_obj - - def validate(self): - if not self.solved: - raise AnatomyTemplateUnsolved( - self.template, - self.missing_keys, - self.invalid_types - ) - - def copy(self): - tmp = TemplateResult( - str(self), - self.template, - self.solved, - self.used_values, - self.missing_keys, - self.invalid_types - ) - return self.__class__(tmp, self.rootless) - - def normalized(self): - """Convert to normalized path.""" - - tmp = TemplateResult( - os.path.normpath(self), - self.template, - self.solved, - self.used_values, - self.missing_keys, - self.invalid_types - ) - return self.__class__(tmp, self.rootless) - - -class AnatomyStringTemplate(StringTemplate): - """String template which has access to anatomy.""" - - def __init__(self, anatomy_templates, template): - self.anatomy_templates = anatomy_templates - super(AnatomyStringTemplate, self).__init__(template) - - def format(self, data): - """Format template and add 'root' key to data if not available. - - Args: - data (dict[str, Any]): Formatting data for template. - - Returns: - AnatomyTemplateResult: Formatting result. 
- """ - - anatomy_templates = self.anatomy_templates - if not data.get("root"): - data = copy.deepcopy(data) - data["root"] = anatomy_templates.anatomy.roots - result = StringTemplate.format(self, data) - rootless_path = anatomy_templates.rootless_path_from_result(result) - return AnatomyTemplateResult(result, rootless_path) - - -class AnatomyTemplates(TemplatesDict): - inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})") - inner_key_name_pattern = re.compile(r"\{@(.*?[^{}0]*)\}") - - def __init__(self, anatomy): - super(AnatomyTemplates, self).__init__() - self.anatomy = anatomy - self.loaded_project = None - - def reset(self): - self._raw_templates = None - self._templates = None - self._objected_templates = None - - @property - def project_name(self): - return self.anatomy.project_name - - @property - def roots(self): - return self.anatomy.roots - - @property - def templates(self): - self._validate_discovery() - return self._templates - - @property - def objected_templates(self): - self._validate_discovery() - return self._objected_templates - - def _validate_discovery(self): - if self.project_name != self.loaded_project: - self.reset() - - if self._templates is None: - self._discover() - self.loaded_project = self.project_name - - def _format_value(self, value, data): - if isinstance(value, RootItem): - return self._solve_dict(value, data) - return super(AnatomyTemplates, self)._format_value(value, data) - - def set_templates(self, templates): - if not templates: - self.reset() - return - - self._raw_templates = copy.deepcopy(templates) - templates = copy.deepcopy(templates) - v_queue = collections.deque() - v_queue.append(templates) - while v_queue: - item = v_queue.popleft() - if not isinstance(item, dict): - continue - - for key in tuple(item.keys()): - value = item[key] - if isinstance(value, dict): - v_queue.append(value) - - elif ( - isinstance(value, six.string_types) - and "{task}" in value - ): - item[key] = value.replace("{task}", "{task[name]}") - - solved_templates = self.solve_template_inner_links(templates) - self._templates = solved_templates - self._objected_templates = self.create_objected_templates( - solved_templates - ) - - def _create_template_object(self, template): - return AnatomyStringTemplate(self, template) - - def default_templates(self): - """Return default templates data with solved inner keys.""" - return self.solve_template_inner_links( - self.anatomy["templates"] - ) - - def _discover(self): - """ Loads anatomy templates from yaml. - Default templates are loaded if project is not set or project does - not have set it's own. - TODO: create templates if not exist. - - Returns: - TemplatesResultDict: Contain templates data for current project of - default templates. - """ - - if self.project_name is None: - # QUESTION create project specific if not found? - raise AssertionError(( - "Project \"{0}\" does not have his own templates." - " Trying to use default." - ).format(self.project_name)) - - self.set_templates(self.anatomy["templates"]) - - @classmethod - def replace_inner_keys(cls, matches, value, key_values, key): - """Replacement of inner keys in template values.""" - for match in matches: - anatomy_sub_keys = ( - cls.inner_key_name_pattern.findall(match) - ) - if key in anatomy_sub_keys: - raise ValueError(( - "Unsolvable recursion in inner keys, " - "key: \"{}\" is in his own value." - " Can't determine source, please check Anatomy templates." 
- ).format(key)) - - for anatomy_sub_key in anatomy_sub_keys: - replace_value = key_values.get(anatomy_sub_key) - if replace_value is None: - raise KeyError(( - "Anatomy templates can't be filled." - " Anatomy key `{0}` has" - " invalid inner key `{1}`." - ).format(key, anatomy_sub_key)) - - if not ( - isinstance(replace_value, numbers.Number) - or isinstance(replace_value, six.string_types) - ): - raise ValueError(( - "Anatomy templates can't be filled." - " Anatomy key `{0}` has" - " invalid inner key `{1}`" - " with value `{2}`." - ).format(key, anatomy_sub_key, str(replace_value))) - - value = value.replace(match, str(replace_value)) - - return value - - @classmethod - def prepare_inner_keys(cls, key_values): - """Check values of inner keys. - - Check if inner key exist in template group and has valid value. - It is also required to avoid infinite loop with unsolvable recursion - when first inner key's value refers to second inner key's value where - first is used. - """ - keys_to_solve = set(key_values.keys()) - while True: - found = False - for key in tuple(keys_to_solve): - value = key_values[key] - - if isinstance(value, six.string_types): - matches = cls.inner_key_pattern.findall(value) - if not matches: - keys_to_solve.remove(key) - continue - - found = True - key_values[key] = cls.replace_inner_keys( - matches, value, key_values, key - ) - continue - - elif not isinstance(value, dict): - keys_to_solve.remove(key) - continue - - subdict_found = False - for _key, _value in tuple(value.items()): - matches = cls.inner_key_pattern.findall(_value) - if not matches: - continue - - subdict_found = True - found = True - key_values[key][_key] = cls.replace_inner_keys( - matches, _value, key_values, - "{}.{}".format(key, _key) - ) - - if not subdict_found: - keys_to_solve.remove(key) - - if not found: - break - - return key_values - - @classmethod - def solve_template_inner_links(cls, templates): - """Solve templates inner keys identified by "{@*}". - - Process is split into 2 parts. - First is collecting all global keys (keys in top hierarchy where value - is not dictionary). All global keys are set for all group keys (keys - in top hierarchy where value is dictionary). Value of a key is not - overridden in group if already contain value for the key. - - In second part all keys with "at" symbol in value are replaced with - value of the key afterward "at" symbol from the group. - - Args: - templates (dict): Raw templates data. 
- - Example: - templates:: - key_1: "value_1", - key_2: "{@key_1}/{filling_key}" - - group_1: - key_3: "value_3/{@key_2}" - - group_2: - key_2": "value_2" - key_4": "value_4/{@key_2}" - - output:: - key_1: "value_1" - key_2: "value_1/{filling_key}" - - group_1: { - key_1: "value_1" - key_2: "value_1/{filling_key}" - key_3: "value_3/value_1/{filling_key}" - - group_2: { - key_1: "value_1" - key_2: "value_2" - key_4: "value_3/value_2" - """ - default_key_values = templates.pop("defaults", {}) - for key, value in tuple(templates.items()): - if isinstance(value, dict): - continue - default_key_values[key] = templates.pop(key) - - # Pop "others" key before before expected keys are processed - other_templates = templates.pop("others") or {} - - keys_by_subkey = {} - for sub_key, sub_value in templates.items(): - key_values = {} - key_values.update(default_key_values) - key_values.update(sub_value) - keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) - - for sub_key, sub_value in other_templates.items(): - if sub_key in keys_by_subkey: - log.warning(( - "Key \"{}\" is duplicated in others. Skipping." - ).format(sub_key)) - continue - - key_values = {} - key_values.update(default_key_values) - key_values.update(sub_value) - keys_by_subkey[sub_key] = cls.prepare_inner_keys(key_values) - - default_keys_by_subkeys = cls.prepare_inner_keys(default_key_values) - - for key, value in default_keys_by_subkeys.items(): - keys_by_subkey[key] = value - - return keys_by_subkey - - @classmethod - def _dict_to_subkeys_list(cls, subdict, pre_keys=None): - if pre_keys is None: - pre_keys = [] - output = [] - for key in subdict: - value = subdict[key] - result = list(pre_keys) - result.append(key) - if isinstance(value, dict): - for item in cls._dict_to_subkeys_list(value, result): - output.append(item) - else: - output.append(result) - return output - - def _keys_to_dicts(self, key_list, value): - if not key_list: - return None - if len(key_list) == 1: - return {key_list[0]: value} - return {key_list[0]: self._keys_to_dicts(key_list[1:], value)} - - @classmethod - def rootless_path_from_result(cls, result): - """Calculate rootless path from formatting result. - - Args: - result (TemplateResult): Result of StringTemplate formatting. - - Returns: - str: Rootless path if result contains one of anatomy roots. - """ - - used_values = result.used_values - missing_keys = result.missing_keys - template = result.template - invalid_types = result.invalid_types - if ( - "root" not in used_values - or "root" in missing_keys - or "{root" not in template - ): - return - - for invalid_type in invalid_types: - if "root" in invalid_type: - return - - root_keys = cls._dict_to_subkeys_list({"root": used_values["root"]}) - if not root_keys: - return - - output = str(result) - for used_root_keys in root_keys: - if not used_root_keys: - continue - - used_value = used_values - root_key = None - for key in used_root_keys: - used_value = used_value[key] - if root_key is None: - root_key = key - else: - root_key += "[{}]".format(key) - - root_key = "{" + root_key + "}" - output = output.replace(str(used_value), root_key) - - return output - - def format(self, data, strict=True): - copy_data = copy.deepcopy(data) - roots = self.roots - if roots: - copy_data["root"] = roots - result = super(AnatomyTemplates, self).format(copy_data) - result.strict = strict - return result - - def format_all(self, in_data, only_keys=True): - """ Solves templates based on entered data. 
- - Args: - data (dict): Containing keys to be filled into template. - - Returns: - TemplatesResultDict: Output `TemplateResult` have `strict` - attribute set to False so accessing unfilled keys in templates - won't raise any exceptions. - """ - return self.format(in_data, strict=False) - - -class RootItem(FormatObject): - """Represents one item or roots. - - Holds raw data of root item specification. Raw data contain value - for each platform, but current platform value is used when object - is used for formatting of template. - - Args: - root_raw_data (dict): Dictionary containing root values by platform - names. ["windows", "linux" and "darwin"] - name (str, optional): Root name which is representing. Used with - multi root setup otherwise None value is expected. - parent_keys (list, optional): All dictionary parent keys. Values of - `parent_keys` are used for get full key which RootItem is - representing. Used for replacing root value in path with - formattable key. e.g. parent_keys == ["work"] -> {root[work]} - parent (object, optional): It is expected to be `Roots` object. - Value of `parent` won't affect code logic much. - """ - - def __init__( - self, root_raw_data, name=None, parent_keys=None, parent=None - ): - lowered_platform_keys = {} - for key, value in root_raw_data.items(): - lowered_platform_keys[key.lower()] = value - self.raw_data = lowered_platform_keys - self.cleaned_data = self._clean_roots(lowered_platform_keys) - self.name = name - self.parent_keys = parent_keys or [] - self.parent = parent - - self.available_platforms = list(lowered_platform_keys.keys()) - self.value = lowered_platform_keys.get(platform.system().lower()) - self.clean_value = self.clean_root(self.value) - - def __format__(self, *args, **kwargs): - return self.value.__format__(*args, **kwargs) - - def __str__(self): - return str(self.value) - - def __repr__(self): - return self.__str__() - - def __getitem__(self, key): - if isinstance(key, numbers.Number): - return self.value[key] - - additional_info = "" - if self.parent and self.parent.project_name: - additional_info += " for project \"{}\"".format( - self.parent.project_name - ) - - raise AssertionError( - "Root key \"{}\" is missing{}.".format( - key, additional_info - ) - ) - - def full_key(self): - """Full key value for dictionary formatting in template. - - Returns: - str: Return full replacement key for formatting. This helps when - multiple roots are set. In that case e.g. `"root[work]"` is - returned. - """ - if not self.name: - return "root" - - joined_parent_keys = "".join( - ["[{}]".format(key) for key in self.parent_keys] - ) - return "root{}".format(joined_parent_keys) - - def clean_path(self, path): - """Just replace backslashes with forward slashes.""" - return str(path).replace("\\", "/") - - def clean_root(self, root): - """Makes sure root value does not end with slash.""" - if root: - root = self.clean_path(root) - while root.endswith("/"): - root = root[:-1] - return root - - def _clean_roots(self, raw_data): - """Clean all values of raw root item values.""" - cleaned = {} - for key, value in raw_data.items(): - cleaned[key] = self.clean_root(value) - return cleaned - - def path_remapper(self, path, dst_platform=None, src_platform=None): - """Remap path for specific platform. - - Args: - path (str): Source path which need to be remapped. - dst_platform (str, optional): Specify destination platform - for which remapping should happen. - src_platform (str, optional): Specify source platform. 
This is - recommended to not use and keep unset until you really want - to use specific platform. - roots (dict/RootItem/None, optional): It is possible to remap - path with different roots then instance where method was - called has. - - Returns: - str/None: When path does not contain known root then - None is returned else returns remapped path with "{root}" - or "{root[]}". - """ - cleaned_path = self.clean_path(path) - if dst_platform: - dst_root_clean = self.cleaned_data.get(dst_platform) - if not dst_root_clean: - key_part = "" - full_key = self.full_key() - if full_key != "root": - key_part += "\"{}\" ".format(full_key) - - log.warning( - "Root {}miss platform \"{}\" definition.".format( - key_part, dst_platform - ) - ) - return None - - if cleaned_path.startswith(dst_root_clean): - return cleaned_path - - if src_platform: - src_root_clean = self.cleaned_data.get(src_platform) - if src_root_clean is None: - log.warning( - "Root \"{}\" miss platform \"{}\" definition.".format( - self.full_key(), src_platform - ) - ) - return None - - if not cleaned_path.startswith(src_root_clean): - return None - - subpath = cleaned_path[len(src_root_clean):] - if dst_platform: - # `dst_root_clean` is used from upper condition - return dst_root_clean + subpath - return self.clean_value + subpath - - result, template = self.find_root_template_from_path(path) - if not result: - return None - - def parent_dict(keys, value): - if not keys: - return value - - key = keys.pop(0) - return {key: parent_dict(keys, value)} - - if dst_platform: - format_value = parent_dict(list(self.parent_keys), dst_root_clean) - else: - format_value = parent_dict(list(self.parent_keys), self.value) - - return template.format(**{"root": format_value}) - - def find_root_template_from_path(self, path): - """Replaces known root value with formattable key in path. - - All platform values are checked for this replacement. - - Args: - path (str): Path where root value should be found. - - Returns: - tuple: Tuple contain 2 values: `success` (bool) and `path` (str). - When success it True then path should contain replaced root - value with formattable key. - - Example: - When input path is:: - "C:/windows/path/root/projects/my_project/file.ext" - - And raw data of item looks like:: - { - "windows": "C:/windows/path/root", - "linux": "/mount/root" - } - - Output will be:: - (True, "{root}/projects/my_project/file.ext") - - If any of raw data value wouldn't match path's root output is:: - (False, "C:/windows/path/root/projects/my_project/file.ext") - """ - result = False - output = str(path) - - mod_path = self.clean_path(path) - for root_os, root_path in self.cleaned_data.items(): - # Skip empty paths - if not root_path: - continue - - _mod_path = mod_path # reset to original cleaned value - if root_os == "windows": - root_path = root_path.lower() - _mod_path = _mod_path.lower() - - if _mod_path.startswith(root_path): - result = True - replacement = "{" + self.full_key() + "}" - output = replacement + mod_path[len(root_path):] - break - - return (result, output) - - -class Roots: - """Object which should be used for formatting "root" key in templates. - - Args: - anatomy Anatomy: Anatomy object created for a specific project. 
- """ - - env_prefix = "AYON_PROJECT_ROOT" - roots_filename = "roots.json" - - def __init__(self, anatomy): - self.anatomy = anatomy - self.loaded_project = None - self._roots = None - - def __format__(self, *args, **kwargs): - return self.roots.__format__(*args, **kwargs) - - def __getitem__(self, key): - return self.roots[key] - - def reset(self): - """Reset current roots value.""" - self._roots = None - - def path_remapper( - self, path, dst_platform=None, src_platform=None, roots=None - ): - """Remap path for specific platform. - - Args: - path (str): Source path which need to be remapped. - dst_platform (str, optional): Specify destination platform - for which remapping should happen. - src_platform (str, optional): Specify source platform. This is - recommended to not use and keep unset until you really want - to use specific platform. - roots (dict/RootItem/None, optional): It is possible to remap - path with different roots then instance where method was - called has. - - Returns: - str/None: When path does not contain known root then - None is returned else returns remapped path with "{root}" - or "{root[]}". - """ - if roots is None: - roots = self.roots - - if roots is None: - raise ValueError("Roots are not set. Can't find path.") - - if "{root" in path: - path = path.format(**{"root": roots}) - # If `dst_platform` is not specified then return else continue. - if not dst_platform: - return path - - if isinstance(roots, RootItem): - return roots.path_remapper(path, dst_platform, src_platform) - - for _root in roots.values(): - result = self.path_remapper( - path, dst_platform, src_platform, _root - ) - if result is not None: - return result - - def find_root_template_from_path(self, path, roots=None): - """Find root value in entered path and replace it with formatting key. - - Args: - path (str): Source path where root will be searched. - roots (Roots/dict, optional): It is possible to use different - roots than instance where method was triggered has. - - Returns: - tuple: Output contains tuple with bool representing success as - first value and path with or without replaced root with - formatting key as second value. - - Raises: - ValueError: When roots are not entered and can't be loaded. - """ - if roots is None: - log.debug( - "Looking for matching root in path \"{}\".".format(path) - ) - roots = self.roots - - if roots is None: - raise ValueError("Roots are not set. Can't find path.") - - if isinstance(roots, RootItem): - return roots.find_root_template_from_path(path) - - for root_name, _root in roots.items(): - success, result = self.find_root_template_from_path(path, _root) - if success: - log.info("Found match in root \"{}\".".format(root_name)) - return success, result - - log.warning("No matching root was found in current setting.") - return (False, path) - - def set_root_environments(self): - """Set root environments for current project.""" - for key, value in self.root_environments().items(): - os.environ[key] = value - - def root_environments(self): - """Use root keys to create unique keys for environment variables. - - Concatenates prefix "AYON_PROJECT_ROOT_" with root keys to create - unique keys. - - Returns: - dict: Result is `{(str): (str)}` dicitonary where key represents - unique key concatenated by keys and value is root value of - current platform root. 
- - Example: - With raw root values:: - "work": { - "windows": "P:/projects/work", - "linux": "/mnt/share/projects/work", - "darwin": "/darwin/path/work" - }, - "publish": { - "windows": "P:/projects/publish", - "linux": "/mnt/share/projects/publish", - "darwin": "/darwin/path/publish" - } - - Result on windows platform:: - { - "AYON_PROJECT_ROOT_WORK": "P:/projects/work", - "AYON_PROJECT_ROOT_PUBLISH": "P:/projects/publish" - } - - """ - return self._root_environments() - - def all_root_paths(self, roots=None): - """Return all paths for all roots of all platforms.""" - if roots is None: - roots = self.roots - - output = [] - if isinstance(roots, RootItem): - for value in roots.raw_data.values(): - output.append(value) - return output - - for _roots in roots.values(): - output.extend(self.all_root_paths(_roots)) - return output - - def _root_environments(self, keys=None, roots=None): - if not keys: - keys = [] - if roots is None: - roots = self.roots - - if isinstance(roots, RootItem): - key_items = [self.env_prefix] - for _key in keys: - key_items.append(_key.upper()) - - key = "_".join(key_items) - # Make sure key and value does not contain unicode - # - can happen in Python 2 hosts - return {str(key): str(roots.value)} - - output = {} - for _key, _value in roots.items(): - _keys = list(keys) - _keys.append(_key) - output.update(self._root_environments(_keys, _value)) - return output - - def root_environmets_fill_data(self, template=None): - """Environment variable values in dictionary for rootless path. - - Args: - template (str): Template for environment variable key fill. - By default is set to `"${}"`. - """ - if template is None: - template = "${}" - return self._root_environmets_fill_data(template) - - def _root_environmets_fill_data(self, template, keys=None, roots=None): - if keys is None and roots is None: - return { - "root": self._root_environmets_fill_data( - template, [], self.roots - ) - } - - if isinstance(roots, RootItem): - key_items = [Roots.env_prefix] - for _key in keys: - key_items.append(_key.upper()) - key = "_".join(key_items) - return template.format(key) - - output = {} - for key, value in roots.items(): - _keys = list(keys) - _keys.append(key) - output[key] = self._root_environmets_fill_data( - template, _keys, value - ) - return output - - @property - def project_name(self): - """Return project name which will be used for loading root values.""" - return self.anatomy.project_name - - @property - def roots(self): - """Property for filling "root" key in templates. - - This property returns roots for current project or default root values. - Warning: - Default roots value may cause issues when project use different - roots settings. That may happen when project use multiroot - templates but default roots miss their keys. - """ - if self.project_name != self.loaded_project: - self._roots = None - - if self._roots is None: - self._roots = self._discover() - self.loaded_project = self.project_name - return self._roots - - def _discover(self): - """ Loads current project's roots or default. - - Default roots are loaded if project override's does not contain roots. - - Returns: - `RootItem` or `dict` with multiple `RootItem`s when multiroot - setting is used. - """ - - return self._parse_dict(self.anatomy["roots"], parent=self) - - @staticmethod - def _parse_dict(data, key=None, parent_keys=None, parent=None): - """Parse roots raw data into RootItem or dictionary with RootItems. - - Converting raw roots data to `RootItem` helps to handle platform keys. 
-        This method is recursive to be able handle multiroot setup and
-        is static to be able to load default roots without creating new object.
-
-        Args:
-            data (dict): Should contain raw roots data to be parsed.
-            key (str, optional): Current root key. Set by recursion.
-            parent_keys (list): Parent dictionary keys. Set by recursion.
-            parent (Roots, optional): Parent object set in `RootItem`
-                helps to keep RootItem instance updated with `Roots` object.
-
-        Returns:
-            `RootItem` or `dict` with multiple `RootItem`s when multiroot
-            setting is used.
-        """
-        if not parent_keys:
-            parent_keys = []
-        is_last = False
-        for value in data.values():
-            if isinstance(value, six.string_types):
-                is_last = True
-                break
-
-        if is_last:
-            return RootItem(data, key, parent_keys, parent=parent)
-
-        output = {}
-        for _key, value in data.items():
-            _parent_keys = list(parent_keys)
-            _parent_keys.append(_key)
-            output[_key] = Roots._parse_dict(value, _key, _parent_keys, parent)
-        return output
diff --git a/client/ayon_core/pipeline/anatomy/__init__.py b/client/ayon_core/pipeline/anatomy/__init__.py
new file mode 100644
index 0000000000..336d09ccaa
--- /dev/null
+++ b/client/ayon_core/pipeline/anatomy/__init__.py
@@ -0,0 +1,17 @@
+from .exceptions import (
+    ProjectNotSet,
+    RootCombinationError,
+    TemplateMissingKey,
+    AnatomyTemplateUnsolved,
+)
+from .anatomy import Anatomy
+
+
+__all__ = (
+    "ProjectNotSet",
+    "RootCombinationError",
+    "TemplateMissingKey",
+    "AnatomyTemplateUnsolved",
+
+    "Anatomy",
+)
diff --git a/client/ayon_core/pipeline/anatomy/anatomy.py b/client/ayon_core/pipeline/anatomy/anatomy.py
new file mode 100644
index 0000000000..2aa8eeddbc
--- /dev/null
+++ b/client/ayon_core/pipeline/anatomy/anatomy.py
@@ -0,0 +1,556 @@
+import os
+import re
+import copy
+import platform
+import collections
+import time
+
+import ayon_api
+
+from ayon_core.lib import Logger, get_local_site_id, StringTemplate
+from ayon_core.addon import AddonsManager
+
+from .exceptions import RootCombinationError, ProjectNotSet
+from .roots import AnatomyRoots
+from .templates import AnatomyTemplates
+
+log = Logger.get_logger(__name__)
+
+
+class BaseAnatomy(object):
+    """Anatomy module helps to keep project settings.
+
+    Wraps key project specifications, AnatomyTemplates and AnatomyRoots.
+    """
+    root_key_regex = re.compile(r"{(root?[^}]+)}")
+    root_name_regex = re.compile(r"root\[([^]]+)\]")
+
+    def __init__(self, project_entity, root_overrides=None):
+        self._project_name = project_entity["name"]
+        self._project_code = project_entity["code"]
+
+        self._data = self._prepare_anatomy_data(
+            project_entity, root_overrides
+        )
+        self._templates_obj = AnatomyTemplates(self)
+        self._roots_obj = AnatomyRoots(self)
+
+    # Anatomy used as dictionary
+    # - implemented only getters returning copy
+    def __getitem__(self, key):
+        return copy.deepcopy(self._data[key])
+
+    def get(self, key, default=None):
+        if key not in self._data:
+            return default
+        return copy.deepcopy(self._data[key])
+
+    def keys(self):
+        return copy.deepcopy(self._data).keys()
+
+    def values(self):
+        return copy.deepcopy(self._data).values()
+
+    def items(self):
+        return copy.deepcopy(self._data).items()
+
+    @property
+    def project_name(self):
+        """Project name for which the anatomy is prepared.
+
+        Returns:
+            str: Project name.
+
+        """
+        return self._project_name
+
+    @property
+    def project_code(self):
+        """Project code for which the anatomy is prepared.
+
+        Returns:
+            str: Project code.
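+
+        Example:
+            >>> # Values are illustrative; entity data comes from the server.
+            >>> anatomy = Anatomy("MyProject")
+            >>> anatomy.project_code
+            'myprj'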
+ + """ + return self._project_code + + def _prepare_anatomy_data(self, project_entity, root_overrides): + """Prepare anatomy data for further processing. + + Method added to replace `{task}` with `{task[name]}` in templates. + """ + + anatomy_data = self._project_entity_to_anatomy_data(project_entity) + + self._apply_local_settings_on_anatomy_data( + anatomy_data, + root_overrides + ) + + return anatomy_data + + @property + def templates(self): + """Wrap property `templates` of Anatomy's AnatomyTemplates instance.""" + return self._templates_obj.templates + + @property + def templates_obj(self): + """Return `AnatomyTemplates` object of current Anatomy instance.""" + return self._templates_obj + + def get_template_item(self, *args, **kwargs): + """Get template item from category. + + Args: + category_name (str): Category name. + template_name (str): Template name. + subkey (Optional[str]): Subkey name. + default (Any): Default value. + + Returns: + Any: Template item, subkey value as AnatomyStringTemplate or None. + + """ + return self._templates_obj.get_template_item(*args, **kwargs) + + def format(self, *args, **kwargs): + """Wrap `format` method of Anatomy's `templates_obj`.""" + return self._templates_obj.format(*args, **kwargs) + + def format_all(self, *args, **kwargs): + """Wrap `format_all` method of Anatomy's `templates_obj`. + + Deprecated: + Use ``format`` method with ``strict=False`` instead. + + """ + return self._templates_obj.format_all(*args, **kwargs) + + @property + def roots(self): + """Wrap `roots` property of Anatomy's `roots_obj`.""" + return self._roots_obj.roots + + @property + def roots_obj(self): + """Roots wrapper object. + + Returns: + AnatomyRoots: Roots wrapper. + + """ + return self._roots_obj + + def root_environments(self): + """Return AYON_PROJECT_ROOT_* environments for current project.""" + return self._roots_obj.root_environments() + + def root_environmets_fill_data(self, template=None): + """Environment variable values in dictionary for rootless path. + + Args: + template (str): Template for environment variable key fill. + By default is set to `"${}"`. + """ + return self.roots_obj.root_environmets_fill_data(template) + + def find_root_template_from_path(self, *args, **kwargs): + """Wrapper for AnatomyRoots `find_root_template_from_path`.""" + return self.roots_obj.find_root_template_from_path(*args, **kwargs) + + def path_remapper(self, *args, **kwargs): + """Wrapper for AnatomyRoots `path_remapper`.""" + return self.roots_obj.path_remapper(*args, **kwargs) + + def all_root_paths(self): + """Wrapper for AnatomyRoots `all_root_paths`.""" + return self.roots_obj.all_root_paths() + + def set_root_environments(self): + """Set AYON_PROJECT_ROOT_* environments for current project.""" + self._roots_obj.set_root_environments() + + def root_names(self): + """Return root names for current project.""" + return self.root_names_from_templates(self.templates) + + def _root_keys_from_templates(self, data): + """Extract root key from templates in data. + + Args: + data (dict): Data that may contain templates as string. + + Return: + set: Set of all root names from templates as strings. 
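+
+        The data is walked iteratively, so nested dictionaries and
+        'StringTemplate' values are searched as well.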
+ + Output example: `{"root[work]", "root[publish]"}` + """ + + output = set() + keys_queue = collections.deque() + keys_queue.append(data) + while keys_queue: + queue_data = keys_queue.popleft() + if isinstance(queue_data, StringTemplate): + queue_data = queue_data.template + + if isinstance(queue_data, dict): + for value in queue_data.values(): + keys_queue.append(value) + + elif isinstance(queue_data, str): + for group in re.findall(self.root_key_regex, queue_data): + output.add(group) + + return output + + def root_value_for_template(self, template): + """Returns value of root key from template.""" + if isinstance(template, StringTemplate): + template = template.template + root_templates = [] + for group in re.findall(self.root_key_regex, template): + root_templates.append("{" + group + "}") + + if not root_templates: + return None + + return root_templates[0].format(**{"root": self.roots}) + + def root_names_from_templates(self, templates): + """Extract root names form anatomy templates. + + Returns None if values in templates contain only "{root}". + Empty list is returned if there is no "root" in templates. + Else returns all root names from templates in list. + + RootCombinationError is raised when templates contain both root types, + basic "{root}" and with root name specification "{root[work]}". + + Args: + templates (dict): Anatomy templates where roots are not filled. + + Return: + list/None: List of all root names from templates as strings when + multiroot setup is used, otherwise None is returned. + """ + roots = list(self._root_keys_from_templates(templates)) + # Return empty list if no roots found in templates + if not roots: + return roots + + # Raise exception when root keys have roots with and without root name. + # Invalid output example: ["root", "root[project]", "root[render]"] + if len(roots) > 1 and "root" in roots: + raise RootCombinationError(roots) + + # Return None if "root" without root name in templates + if len(roots) == 1 and roots[0] == "root": + return None + + names = set() + for root in roots: + for group in re.findall(self.root_name_regex, root): + names.add(group) + return list(names) + + def fill_root(self, template_path): + """Fill template path where is only "root" key unfilled. + + Args: + template_path (str): Path with "root" key in. + Example path: "{root}/projects/MyProject/Shot01/Lighting/..." + + Return: + str: formatted path + """ + # NOTE does not care if there are different keys than "root" + return template_path.format(**{"root": self.roots}) + + @classmethod + def fill_root_with_path(cls, rootless_path, root_path): + """Fill path without filled "root" key with passed path. + + This is helper to fill root with different directory path than anatomy + has defined no matter if is single or multiroot. + + Output path is same as input path if `rootless_path` does not contain + unfilled root key. + + Args: + rootless_path (str): Path without filled "root" key. Example: + "{root[work]}/MyProject/..." + root_path (str): What should replace root key in `rootless_path`. + + Returns: + str: Path with filled root. + """ + output = str(rootless_path) + for group in re.findall(cls.root_key_regex, rootless_path): + replacement = "{" + group + "}" + output = output.replace(replacement, root_path) + + return output + + def replace_root_with_env_key(self, filepath, template=None): + """Replace root of path with environment key. + + # Example: + ## Project with roots: + ``` + { + "nas": { + "windows": P:/projects", + ... + } + ... 
+            }
+        }
+        ```
+
+        ## Entered filepath
+        "P:/projects/project/folder/task/animation_v001.ma"
+
+        ## Entered template
+        "<{}>"
+
+        ## Output
+        "<AYON_PROJECT_ROOT_NAS>/project/folder/task/animation_v001.ma"
+
+        Args:
+            filepath (str): Full file path where root should be replaced.
+            template (str): Optional template for environment key. Must
+                have one index format key.
+                Default value if not entered: "${}"
+
+        Returns:
+            str: Path where root is replaced with environment root key.
+
+        Raises:
+            ValueError: When project's roots were not found in entered path.
+        """
+        success, rootless_path = self.find_root_template_from_path(filepath)
+        if not success:
+            raise ValueError(
+                "{}: Project's roots were not found in path: {}".format(
+                    self.project_name, filepath
+                )
+            )
+
+        data = self.root_environmets_fill_data(template)
+        return rootless_path.format(**data)
+
+    def _project_entity_to_anatomy_data(self, project_entity):
+        """Convert project entity to anatomy data.
+
+        Probably should fill missing keys and values.
+        """
+
+        output = copy.deepcopy(project_entity["config"])
+        # TODO remove AYON conversion
+        task_types = copy.deepcopy(project_entity["taskTypes"])
+        new_task_types = {}
+        for task_type in task_types:
+            name = task_type["name"]
+            new_task_types[name] = task_type
+        output["tasks"] = new_task_types
+        output["attributes"] = copy.deepcopy(project_entity["attrib"])
+
+        return output
+
+    def _apply_local_settings_on_anatomy_data(
+        self, anatomy_data, root_overrides
+    ):
+        """Apply local settings on anatomy data.
+
+        ATM local settings can modify project roots. Project name is required
+        as local settings store data by project name.
+
+        Local settings override root values in this order:
+        1.) Check if local settings contain overrides for default project and
+        apply its values on roots if there are any.
+        2.) If passed `project_name` is not None then check project specific
+        overrides in local settings for the project and apply its value on
+        roots if there are any.
+
+        NOTE: Root values of default project from local settings are always
+        applied if they are set.
+
+        Args:
+            anatomy_data (dict): Data for anatomy.
+            root_overrides (dict): Data of local settings.
+        """
+
+        # Skip processing if roots for current active site are not available
+        # in local settings
+        if not root_overrides:
+            return
+
+        current_platform = platform.system().lower()
+
+        root_data = anatomy_data["roots"]
+        for root_name, path in root_overrides.items():
+            if root_name not in root_data:
+                continue
+            anatomy_data["roots"][root_name][current_platform] = (
+                path
+            )
+
+
+class CacheItem:
+    """Helper to cache data.
+
+    Helper does not handle refresh of data and does not mark data as outdated.
+    Consumers of the object should check the outdated state themselves.
+    """
+
+    default_lifetime = 10
+
+    def __init__(self, lifetime=None):
+        self._data = None
+        self._cached = None
+        self._lifetime = lifetime or self.default_lifetime
+
+    @property
+    def data(self):
+        """Cached data/object.
+
+        Returns:
+            Any: Whatever was cached.
+        """
+
+        return self._data
+
+    @property
+    def is_outdated(self):
+        """Item has outdated cache.
+
+        Lifetime of cache item expired or was not yet set.
+
+        Returns:
+            bool: Item is outdated.
+        """
+
+        if self._cached is None:
+            return True
+        return (time.time() - self._cached) > self._lifetime
+
+    def update_data(self, data):
+        """Update cache of data.
+
+        Args:
+            data (Any): Data to cache.
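+
+        Example:
+            A short sketch with illustrative data:
+
+            >>> cache = CacheItem(lifetime=10)
+            >>> cache.update_data({"name": "MyProject"})
+            >>> cache.is_outdated
+            False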
+        """
+
+        self._data = data
+        self._cached = time.time()
+
+
+class Anatomy(BaseAnatomy):
+    _sitesync_addon_cache = CacheItem()
+    _project_cache = collections.defaultdict(CacheItem)
+    _default_site_id_cache = collections.defaultdict(CacheItem)
+    _root_overrides_cache = collections.defaultdict(
+        lambda: collections.defaultdict(CacheItem)
+    )
+
+    def __init__(
+        self, project_name=None, site_name=None, project_entity=None
+    ):
+        if not project_name:
+            project_name = os.environ.get("AYON_PROJECT_NAME")
+
+        if not project_name:
+            raise ProjectNotSet((
+                "Implementation bug: Project name is not set. Anatomy"
+                " requires a project name to load data for a specific"
+                " project."
+            ))
+
+        if not project_entity:
+            project_entity = self.get_project_entity_from_cache(project_name)
+        root_overrides = self._get_site_root_overrides(
+            project_name, site_name
+        )
+
+        super(Anatomy, self).__init__(project_entity, root_overrides)
+
+    @classmethod
+    def get_project_entity_from_cache(cls, project_name):
+        project_cache = cls._project_cache[project_name]
+        if project_cache.is_outdated:
+            project_cache.update_data(ayon_api.get_project(project_name))
+        return copy.deepcopy(project_cache.data)
+
+    @classmethod
+    def get_sitesync_addon(cls):
+        if cls._sitesync_addon_cache.is_outdated:
+            manager = AddonsManager()
+            cls._sitesync_addon_cache.update_data(
+                manager.get_enabled_addon("sitesync")
+            )
+        return cls._sitesync_addon_cache.data
+
+    @classmethod
+    def _get_studio_roots_overrides(cls, project_name):
+        """Return 'studio' site root overrides from local settings.
+
+        Notes:
+            This logic handles local overrides of studio site which may be
+                available even when sync server is not enabled.
+            Handling of 'studio' and 'local' site was separated as preparation
+                for AYON development where that will be received from
+                separated sources.
+
+        Args:
+            project_name (str): Name of project.
+
+        Returns:
+            Union[Dict[str, str], None]: Local root overrides.
+        """
+        if not project_name:
+            return
+        return ayon_api.get_project_roots_for_site(
+            project_name, get_local_site_id()
+        )
+
+    @classmethod
+    def _get_site_root_overrides(cls, project_name, site_name):
+        """Get root overrides for site.
+
+        Args:
+            project_name (str): Project name for which root overrides should
+                be received.
+            site_name (Union[str, None]): Name of site for which root
+                overrides should be returned.
+        """
+
+        # First check if sync server is available and enabled
+        sitesync_addon = cls.get_sitesync_addon()
+        if sitesync_addon is None or not sitesync_addon.enabled:
+            # QUESTION is it ok to force 'studio' when site sync is not enabled?
+            site_name = "studio"
+
+        elif not site_name:
+            # Use sync server to receive active site name
+            project_cache = cls._default_site_id_cache[project_name]
+            if project_cache.is_outdated:
+                project_cache.update_data(
+                    sitesync_addon.get_active_site_type(project_name)
+                )
+            site_name = project_cache.data
+
+        site_cache = cls._root_overrides_cache[project_name][site_name]
+        if site_cache.is_outdated:
+            if site_name == "studio":
+                # Handle studio root overrides without sync server
+                # - studio root overrides can be done even without sync server
+                roots_overrides = cls._get_studio_roots_overrides(
+                    project_name
+                )
+            else:
+                # Ask sync server to get roots overrides
+                roots_overrides = sitesync_addon.get_site_root_overrides(
+                    project_name, site_name
+                )
+            site_cache.update_data(roots_overrides)
+        return site_cache.data
diff --git a/client/ayon_core/pipeline/anatomy/exceptions.py b/client/ayon_core/pipeline/anatomy/exceptions.py
new file mode 100644
index 0000000000..39f116baf0
--- /dev/null
+++ b/client/ayon_core/pipeline/anatomy/exceptions.py
@@ -0,0 +1,39 @@
+from ayon_core.lib.path_templates import TemplateUnsolved
+
+
+class ProjectNotSet(Exception):
+    """Exception raised when Anatomy is created without a project name."""
+
+
+class RootCombinationError(Exception):
+    """This exception is raised when templates have combined root types."""
+
+    def __init__(self, roots):
+        joined_roots = ", ".join(
+            ["\"{}\"".format(_root) for _root in roots]
+        )
+        # TODO better error message
+        msg = (
+            "Combination of root with and"
+            " without root name in AnatomyTemplates. {}"
+        ).format(joined_roots)
+
+        super(RootCombinationError, self).__init__(msg)
+
+
+class TemplateMissingKey(Exception):
+    """Exception for cases when key does not exist in template."""
+
+    msg = "Template key '{}' was not found."
+
+    def __init__(self, parents):
+        parent_join = "".join(["[\"{0}\"]".format(key) for key in parents])
+        super(TemplateMissingKey, self).__init__(
+            self.msg.format(parent_join)
+        )
+
+
+class AnatomyTemplateUnsolved(TemplateUnsolved):
+    """Exception for unsolved template when strict is set to True."""
+
+    msg = "Anatomy template \"{0}\" is unsolved.{1}{2}"
diff --git a/client/ayon_core/pipeline/anatomy/roots.py b/client/ayon_core/pipeline/anatomy/roots.py
new file mode 100644
index 0000000000..2773559d49
--- /dev/null
+++ b/client/ayon_core/pipeline/anatomy/roots.py
@@ -0,0 +1,524 @@
+import os
+import platform
+import numbers
+
+from ayon_core.lib import Logger
+from ayon_core.lib.path_templates import FormatObject
+
+
+class RootItem(FormatObject):
+    """Represents one item of roots.
+
+    Holds raw data of root item specification. Raw data contains a value
+    for each platform, but the current platform value is used when the
+    object is used for formatting of a template.
+
+    Args:
+        parent (AnatomyRoots): Parent object.
+        root_raw_data (dict): Dictionary containing root values by platform
+            names. ["windows", "linux" and "darwin"]
+        name (str): Root name which the item represents. Used with
+            multiroot setup, otherwise None value is expected.
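+
+    Example (illustrative values; ``roots`` stands for the parent
+        AnatomyRoots object)::
+
+        root_item = RootItem(
+            roots,
+            {"windows": "P:/projects", "linux": "/mnt/share/projects"},
+            "work",
+        )
+        root_item.full_key    # "root[work]"
+        str(root_item)        # current platform value, e.g. "P:/projects"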
+ """ + def __init__(self, parent, root_raw_data, name): + super(RootItem, self).__init__() + self._log = None + lowered_platform_keys = {} + for key, value in root_raw_data.items(): + lowered_platform_keys[key.lower()] = value + self.raw_data = lowered_platform_keys + self.cleaned_data = self._clean_roots(lowered_platform_keys) + self.name = name + self.parent = parent + + self.available_platforms = set(lowered_platform_keys.keys()) + self.value = lowered_platform_keys.get(platform.system().lower()) + self.clean_value = self._clean_root(self.value) + + def __format__(self, *args, **kwargs): + return self.value.__format__(*args, **kwargs) + + def __str__(self): + return str(self.value) + + def __repr__(self): + return self.__str__() + + def __getitem__(self, key): + if isinstance(key, numbers.Number): + return self.value[key] + + additional_info = "" + if self.parent and self.parent.project_name: + additional_info += " for project \"{}\"".format( + self.parent.project_name + ) + + raise KeyError( + "Root key \"{}\" is missing{}.".format( + key, additional_info + ) + ) + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + @property + def full_key(self): + """Full key value for dictionary formatting in template. + + Returns: + str: Return full replacement key for formatting. This helps when + multiple roots are set. In that case e.g. `"root[work]"` is + returned. + + """ + return "root[{}]".format(self.name) + + @staticmethod + def _clean_path(path): + """Just replace backslashes with forward slashes. + + Args: + path (str): Path which should be cleaned. + + Returns: + str: Cleaned path with forward slashes. + + """ + return str(path).replace("\\", "/") + + def _clean_root(self, root): + """Clean root value. + + Args: + root (str): Root value which should be cleaned. + + Returns: + str: Cleaned root value. + + """ + return self._clean_path(root).rstrip("/") + + def _clean_roots(self, raw_data): + """Clean all values of raw root item values.""" + cleaned = {} + for key, value in raw_data.items(): + cleaned[key] = self._clean_root(value) + return cleaned + + def path_remapper(self, path, dst_platform=None, src_platform=None): + """Remap path for specific platform. + + Args: + path (str): Source path which need to be remapped. + dst_platform (str, optional): Specify destination platform + for which remapping should happen. + src_platform (str, optional): Specify source platform. This is + recommended to not use and keep unset until you really want + to use specific platform. + + Returns: + Union[str, None]: When path does not contain known root then + None is returned else returns remapped path with + "{root[]}". 
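+
+        Example (assuming the "work" root above with "windows" and
+            "linux" values)::
+
+            root_item.path_remapper(
+                "P:/projects/shot/file.ma", dst_platform="linux"
+            )
+            # -> "/mnt/share/projects/shot/file.ma"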
+
+        """
+        cleaned_path = self._clean_path(path)
+        if dst_platform:
+            dst_root_clean = self.cleaned_data.get(dst_platform)
+            if not dst_root_clean:
+                self.log.warning(
+                    "Root \"{}\" is missing platform \"{}\" definition.".format(
+                        self.full_key, dst_platform
+                    )
+                )
+                return None
+
+            if cleaned_path.startswith(dst_root_clean):
+                return cleaned_path
+
+        if src_platform:
+            src_root_clean = self.cleaned_data.get(src_platform)
+            if src_root_clean is None:
+                self.log.warning(
+                    "Root \"{}\" is missing platform \"{}\" definition.".format(
+                        self.full_key, src_platform
+                    )
+                )
+                return None
+
+            if not cleaned_path.startswith(src_root_clean):
+                return None
+
+            subpath = cleaned_path[len(src_root_clean):]
+            if dst_platform:
+                # `dst_root_clean` is used from upper condition
+                return dst_root_clean + subpath
+            return self.clean_value + subpath
+
+        result, template = self.find_root_template_from_path(path)
+        if not result:
+            return None
+
+        if dst_platform:
+            fill_data = {self.name: dst_root_clean}
+        else:
+            fill_data = {self.name: self.value}
+
+        return template.format(**{"root": fill_data})
+
+    def find_root_template_from_path(self, path):
+        """Replaces known root value with formattable key in path.
+
+        All platform values are checked for this replacement.
+
+        Args:
+            path (str): Path where root value should be found.
+
+        Returns:
+            tuple: Tuple contains 2 values: `success` (bool) and `path` (str).
+                When success is True the path contains the root value
+                replaced with a formattable key.
+
+        Example:
+            When input path is::
+                "C:/windows/path/root/projects/my_project/file.ext"
+
+            And raw data of item looks like::
+                {
+                    "windows": "C:/windows/path/root",
+                    "linux": "/mount/root"
+                }
+
+            Output will be::
+                (True, "{root}/projects/my_project/file.ext")
+
+            If none of the raw data values match the path's root,
+            the output is::
+                (False, "C:/windows/path/root/projects/my_project/file.ext")
+        """
+        result = False
+        output = str(path)
+
+        mod_path = self._clean_path(path)
+        for root_os, root_path in self.cleaned_data.items():
+            # Skip empty paths
+            if not root_path:
+                continue
+
+            _mod_path = mod_path  # reset to original cleaned value
+            if root_os == "windows":
+                root_path = root_path.lower()
+                _mod_path = _mod_path.lower()
+
+            if _mod_path.startswith(root_path):
+                result = True
+                replacement = "{" + self.full_key + "}"
+                output = replacement + mod_path[len(root_path):]
+                break
+
+        return (result, output)
+
+
+class AnatomyRoots:
+    """Object which should be used for formatting "root" key in templates.
+
+    Args:
+        anatomy (Anatomy): Anatomy object created for a specific project.
+    """
+
+    env_prefix = "AYON_PROJECT_ROOT"
+
+    def __init__(self, anatomy):
+        self._log = None
+        self._anatomy = anatomy
+        self._loaded_project = None
+        self._roots = None
+
+    def __format__(self, *args, **kwargs):
+        return self.roots.__format__(*args, **kwargs)
+
+    def __getitem__(self, key):
+        return self.roots[key]
+
+    @property
+    def log(self):
+        if self._log is None:
+            self._log = Logger.get_logger(self.__class__.__name__)
+        return self._log
+
+    @property
+    def anatomy(self):
+        """Parent Anatomy object.
+
+        Returns:
+            Anatomy: Parent anatomy object.
+
+        """
+        return self._anatomy
+
+    def reset(self):
+        """Reset current roots value."""
+        self._roots = None
+
+    def path_remapper(
+        self, path, dst_platform=None, src_platform=None, roots=None
+    ):
+        """Remap path for specific platform.
+
+        Args:
+            path (str): Source path which needs to be remapped.
+            dst_platform (Optional[str]): Specify destination platform
+                for which remapping should happen.
+            src_platform (Optional[str]): Specify source platform. It is
+                recommended to keep this unset unless you really need to
+                use a specific source platform.
+            roots (Optional[Union[dict, RootItem]]): It is possible to remap
+                the path with different roots than those of the instance on
+                which the method was called.
+
+        Returns:
+            Union[str, None]: When path does not contain known root then
+                None is returned else returns remapped path with "{root}"
+                or "{root[]}".
+
+        """
+        if roots is None:
+            roots = self.roots
+
+        if roots is None:
+            raise ValueError("Roots are not set. Can't find path.")
+
+        if "{root" in path:
+            path = path.format(**{"root": roots})
+            # If `dst_platform` is not specified then return else continue.
+            if not dst_platform:
+                return path
+
+        if isinstance(roots, RootItem):
+            return roots.path_remapper(path, dst_platform, src_platform)
+
+        for _root in roots.values():
+            result = self.path_remapper(
+                path, dst_platform, src_platform, _root
+            )
+            if result is not None:
+                return result
+
+    def find_root_template_from_path(self, path, roots=None):
+        """Find root value in entered path and replace it with formatting key.
+
+        Args:
+            path (str): Source path where root will be searched.
+            roots (Optional[Union[AnatomyRoots, dict]]): It is possible to
+                use different roots than those of the instance on which the
+                method was triggered.
+
+        Returns:
+            tuple: Output contains tuple with bool representing success as
+                first value and path with or without replaced root with
+                formatting key as second value.
+
+        Raises:
+            ValueError: When roots are not entered and can't be loaded.
+        """
+        if roots is None:
+            self.log.debug(
+                "Looking for matching root in path \"{}\".".format(path)
+            )
+            roots = self.roots
+
+        if roots is None:
+            raise ValueError("Roots are not set. Can't find path.")
+
+        if isinstance(roots, RootItem):
+            return roots.find_root_template_from_path(path)
+
+        for root_name, _root in roots.items():
+            success, result = self.find_root_template_from_path(path, _root)
+            if success:
+                self.log.debug(
+                    "Found match in root \"{}\".".format(root_name)
+                )
+                return success, result
+
+        self.log.warning("No matching root was found in current setting.")
+        return (False, path)
+
+    def set_root_environments(self):
+        """Set root environments for current project."""
+        for key, value in self.root_environments().items():
+            os.environ[key] = value
+
+    def root_environments(self):
+        """Use root keys to create unique keys for environment variables.
+
+        Concatenates prefix "AYON_PROJECT_ROOT_" with root keys to create
+        unique keys.
+
+        Returns:
+            dict: Result is a `{str: str}` dictionary where the key is the
+                unique concatenated key and the value is the root value of
+                the current platform root.
+ + Example: + With raw root values:: + "work": { + "windows": "P:/projects/work", + "linux": "/mnt/share/projects/work", + "darwin": "/darwin/path/work" + }, + "publish": { + "windows": "P:/projects/publish", + "linux": "/mnt/share/projects/publish", + "darwin": "/darwin/path/publish" + } + + Result on windows platform:: + { + "AYON_PROJECT_ROOT_WORK": "P:/projects/work", + "AYON_PROJECT_ROOT_PUBLISH": "P:/projects/publish" + } + + """ + return self._root_environments() + + def all_root_paths(self, roots=None): + """Return all paths for all roots of all platforms.""" + if roots is None: + roots = self.roots + + output = [] + if isinstance(roots, RootItem): + for value in roots.raw_data.values(): + output.append(value) + return output + + for _roots in roots.values(): + output.extend(self.all_root_paths(_roots)) + return output + + def _root_environments(self, keys=None, roots=None): + if not keys: + keys = [] + if roots is None: + roots = self.roots + + if isinstance(roots, RootItem): + key_items = [self.env_prefix] + for _key in keys: + key_items.append(_key.upper()) + + key = "_".join(key_items) + # Make sure key and value does not contain unicode + # - can happen in Python 2 hosts + return {str(key): str(roots.value)} + + output = {} + for _key, _value in roots.items(): + _keys = list(keys) + _keys.append(_key) + output.update(self._root_environments(_keys, _value)) + return output + + def root_environmets_fill_data(self, template=None): + """Environment variable values in dictionary for rootless path. + + Args: + template (str): Template for environment variable key fill. + By default is set to `"${}"`. + """ + if template is None: + template = "${}" + return self._root_environmets_fill_data(template) + + def _root_environmets_fill_data(self, template, keys=None, roots=None): + if keys is None and roots is None: + return { + "root": self._root_environmets_fill_data( + template, [], self.roots + ) + } + + if isinstance(roots, RootItem): + key_items = [AnatomyRoots.env_prefix] + for _key in keys: + key_items.append(_key.upper()) + key = "_".join(key_items) + return template.format(key) + + output = {} + for key, value in roots.items(): + _keys = list(keys) + _keys.append(key) + output[key] = self._root_environmets_fill_data( + template, _keys, value + ) + return output + + @property + def project_name(self): + """Current project name which will be used for loading root values. + + Returns: + str: Project name. + """ + return self._anatomy.project_name + + @property + def roots(self): + """Property for filling "root" key in templates. + + This property returns roots for current project or default root values. + + Warning: + Default roots value may cause issues when project use different + roots settings. That may happen when project use multiroot + templates but default roots miss their keys. + + """ + if self.project_name != self._loaded_project: + self._roots = None + + if self._roots is None: + self._roots = self._discover() + self._loaded_project = self.project_name + return self._roots + + def _discover(self): + """ Loads current project's roots or default. + + Default roots are loaded if project override's does not contain roots. + + Returns: + `RootItem` or `dict` with multiple `RootItem`s when multiroot + setting is used. + """ + + return self._parse_dict(self._anatomy["roots"], self) + + @staticmethod + def _parse_dict(data, parent): + """Parse roots raw data into RootItem or dictionary with RootItems. + + Converting raw roots data to `RootItem` helps to handle platform keys. 
+ This method is recursive to be able handle multiroot setup and + is static to be able to load default roots without creating new object. + + Args: + data (dict): Should contain raw roots data to be parsed. + parent (AnatomyRoots): Parent object set as parent + for ``RootItem``. + + Returns: + dict[str, RootItem]: Root items by name. + + """ + output = {} + for root_name, root_values in data.items(): + output[root_name] = RootItem( + parent, root_values, root_name + ) + return output diff --git a/client/ayon_core/pipeline/anatomy/templates.py b/client/ayon_core/pipeline/anatomy/templates.py new file mode 100644 index 0000000000..d89b70719e --- /dev/null +++ b/client/ayon_core/pipeline/anatomy/templates.py @@ -0,0 +1,890 @@ +import os +import re +import copy +import collections +import numbers + +from ayon_core.lib.path_templates import ( + TemplateResult, + StringTemplate, +) + +from .exceptions import ( + ProjectNotSet, + TemplateMissingKey, + AnatomyTemplateUnsolved, +) + +_PLACEHOLDER = object() + + +class AnatomyTemplateResult(TemplateResult): + rootless = None + + def __new__(cls, result, rootless_path): + new_obj = super(AnatomyTemplateResult, cls).__new__( + cls, + str(result), + result.template, + result.solved, + result.used_values, + result.missing_keys, + result.invalid_types + ) + new_obj.rootless = rootless_path + return new_obj + + def validate(self): + if not self.solved: + raise AnatomyTemplateUnsolved( + self.template, + self.missing_keys, + self.invalid_types + ) + + def copy(self): + tmp = TemplateResult( + str(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + return self.__class__(tmp, self.rootless) + + def normalized(self): + """Convert to normalized path.""" + + tmp = TemplateResult( + os.path.normpath(self), + self.template, + self.solved, + self.used_values, + self.missing_keys, + self.invalid_types + ) + return self.__class__(tmp, self.rootless) + + +class AnatomyStringTemplate(StringTemplate): + """String template which has access to anatomy. + + Args: + anatomy_templates (AnatomyTemplates): Anatomy templates object. + template (str): Template string. + """ + + def __init__(self, anatomy_templates, template): + self.anatomy_templates = anatomy_templates + super(AnatomyStringTemplate, self).__init__(template) + + def format(self, data): + """Format template and add 'root' key to data if not available. + + Args: + data (dict[str, Any]): Formatting data for template. + + Returns: + AnatomyTemplateResult: Formatting result. + """ + + anatomy_templates = self.anatomy_templates + if not data.get("root"): + data = copy.deepcopy(data) + data["root"] = anatomy_templates.anatomy.roots + result = StringTemplate.format(self, data) + rootless_path = anatomy_templates.get_rootless_path_from_result( + result + ) + return AnatomyTemplateResult(result, rootless_path) + + +def _merge_dict(main_dict, enhance_dict): + """Merges dictionaries by keys. + + Function call itself if value on key is again dictionary. + + Args: + main_dict (dict): First dict to merge second one into. + enhance_dict (dict): Second dict to be merged. + + Returns: + dict: Merged result. + + .. 
note:: Does not override the whole value of an already existing
+        key, only the differing values from enhance_dict are applied.
+
+    """
+
+    merge_queue = collections.deque()
+    merge_queue.append((main_dict, enhance_dict))
+    while merge_queue:
+        queue_item = merge_queue.popleft()
+        l_dict, r_dict = queue_item
+
+        for key, value in r_dict.items():
+            if key not in l_dict:
+                l_dict[key] = value
+            elif isinstance(value, dict) and isinstance(l_dict[key], dict):
+                merge_queue.append((l_dict[key], value))
+            else:
+                l_dict[key] = value
+    return main_dict
+
+
+class TemplatesResultDict(dict):
+    """Holds and wraps 'AnatomyTemplateResult' for easy bug reporting.
+
+    Dictionary like object which holds 'AnatomyTemplateResult' in the same
+    data structure as base dictionary of anatomy templates. It can raise
+    'TemplateMissingKey' when a missing key is accessed, and in strict mode
+    accessing an unsolved template raises 'AnatomyTemplateUnsolved'.
+
+    """
+
+    def __init__(self, in_data, key=None, parent=None, strict=None):
+        super(TemplatesResultDict, self).__init__()
+        for _key, _value in in_data.items():
+            if isinstance(_value, TemplatesResultDict):
+                _value.parent = self
+            elif isinstance(_value, dict):
+                _value = self.__class__(_value, _key, self)
+            self[_key] = _value
+
+        if strict is None and parent is None:
+            strict = True
+
+        self.key = key
+        self.parent = parent
+        self._is_strict = strict
+
+    def __getitem__(self, key):
+        if key not in self.keys():
+            hier = self.get_hierarchy()
+            hier.append(key)
+            raise TemplateMissingKey(hier)
+
+        value = super(TemplatesResultDict, self).__getitem__(key)
+        if isinstance(value, self.__class__):
+            return value
+
+        # Raise exception when a solved template is expected and it is not.
+        if self.is_strict and hasattr(value, "validate"):
+            value.validate()
+        return value
+
+    def get_is_strict(self):
+        return self._is_strict
+
+    def set_is_strict(self, is_strict):
+        if is_strict is None and self.parent is None:
+            is_strict = True
+        self._is_strict = is_strict
+        for child in self.values():
+            if isinstance(child, self.__class__):
+                child.set_is_strict(is_strict)
+            elif isinstance(child, AnatomyTemplateResult):
+                child.strict = is_strict
+
+    strict = property(get_is_strict, set_is_strict)
+    is_strict = property(get_is_strict, set_is_strict)
+
+    def get_hierarchy(self):
+        """Return dictionary keys one by one to root parent."""
+        if self.key is None:
+            return []
+
+        if self.parent is None:
+            return [self.key]
+
+        par_hier = list(self.parent.get_hierarchy())
+        par_hier.append(self.key)
+        return par_hier
+
+    @property
+    def missing_keys(self):
+        """Return missing keys of all children templates."""
+        missing_keys = set()
+        for value in self.values():
+            missing_keys |= value.missing_keys
+        return missing_keys
+
+    @property
+    def invalid_types(self):
+        """Return invalid types of all children templates."""
+        invalid_types = {}
+        for value in self.values():
+            invalid_types = _merge_dict(invalid_types, value.invalid_types)
+        return invalid_types
+
+    @property
+    def used_values(self):
+        """Return used values for all children templates."""
+        used_values = {}
+        for value in self.values():
+            used_values = _merge_dict(used_values, value.used_values)
+        return used_values
+
+    def get_solved(self):
+        """Get only solved keys from templates."""
+        result = {}
+        for key, value in self.items():
+            if isinstance(value, self.__class__):
+                value = value.get_solved()
+                if not value:
+                    continue
+                result[key] = value
+
+            elif (
+                not hasattr(value, "solved") or
+                value.solved
+            ):
+                result[key] = value
+        return self.__class__(result, key=self.key, parent=self.parent)
+
+
+class TemplateItem:
+    """Template item under template category.
+ + This item data usually contains 'file' and 'directory' by anatomy + definition, enhanced by common data ('frame_padding', + 'version_padding'). It adds 'path' key which is combination of + 'file' and 'directory' values. + + Args: + anatomy_templates (AnatomyTemplates): Anatomy templates object. + template_data (dict[str, Any]): Templates data. + + """ + def __init__(self, anatomy_templates, template_data): + template_data = copy.deepcopy(template_data) + + # Backwards compatibility for 'folder' + # TODO remove when deprecation not needed anymore + if ( + "folder" not in template_data + and "directory" in template_data + ): + template_data["folder"] = template_data["directory"] + + # Add 'path' key + if ( + "path" not in template_data + and "file" in template_data + and "directory" in template_data + ): + template_data["path"] = "/".join( + (template_data["directory"], template_data["file"]) + ) + + for key, value in template_data.items(): + if isinstance(value, str): + value = AnatomyStringTemplate(anatomy_templates, value) + template_data[key] = value + + self._template_data = template_data + self._anatomy_templates = anatomy_templates + + def __getitem__(self, key): + return self._template_data[key] + + def get(self, key, default=None): + return self._template_data.get(key, default) + + def format(self, data, strict=True): + output = {} + for key, value in self._template_data.items(): + if isinstance(value, AnatomyStringTemplate): + value = value.format(data) + output[key] = value + return TemplatesResultDict(output, strict=strict) + + +class TemplateCategory: + """Template category. + + Template category groups template items for specific usage. Categories + available at the moment are 'work', 'publish', 'hero', 'delivery', + 'staging' and 'others'. + + Args: + anatomy_templates (AnatomyTemplates): Anatomy templates object. + category_name (str): Category name. + category_data (dict[str, Any]): Category data. + + """ + def __init__(self, anatomy_templates, category_name, category_data): + for key, value in category_data.items(): + if isinstance(value, dict): + value = TemplateItem(anatomy_templates, value) + elif isinstance(value, str): + value = AnatomyStringTemplate(anatomy_templates, value) + category_data[key] = value + self._name = category_name + self._name_prefix = "{}_".format(category_name) + self._category_data = category_data + + def __getitem__(self, key): + new_key = self._convert_getter_key(key) + return self._category_data[new_key] + + def get(self, key, default=None): + new_key = self._convert_getter_key(key) + return self._category_data.get(new_key, default) + + @property + def name(self): + """Category name. + + Returns: + str: Category name. + + """ + return self._name + + def format(self, data, strict=True): + output = {} + for key, value in self._category_data.items(): + if isinstance(value, TemplateItem): + value = value.format(data, strict) + elif isinstance(value, AnatomyStringTemplate): + value = value.format(data) + + if isinstance(value, TemplatesResultDict): + value.key = key + output[key] = value + return TemplatesResultDict(output, key=self.name, strict=strict) + + def _convert_getter_key(self, key): + """Convert key for backwards compatibility. + + OpenPype compatible settings did contain template keys prefixed by + category name e.g. 'publish_render' which should be just 'render'. + + This method keeps the backwards compatibility but only if the key + starts with the category name prefix and the key is available in + roots. 
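+
+        For example (hypothetical 'publish' category with a 'render'
+            template)::
+
+            category._convert_getter_key("publish_render")  # -> "render"
+            category._convert_getter_key("publish")         # -> "default"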
+ + Args: + key (str): Key to be converted. + + Returns: + str: Converted string. + + """ + if key in self._category_data: + return key + + # Use default when the key is the category name + if key == self._name: + return "default" + + # Remove prefix if is key prefixed + if key.startswith(self._name_prefix): + new_key = key[len(self._name_prefix):] + if new_key in self._category_data: + return new_key + return key + + +class AnatomyTemplates: + inner_key_pattern = re.compile(r"(\{@.*?[^{}0]*\})") + inner_key_name_pattern = re.compile(r"\{@(.*?[^{}0]*)\}") + + def __init__(self, anatomy): + self._anatomy = anatomy + + self._loaded_project = None + self._raw_templates = None + self._templates = None + self._objected_templates = None + + def __getitem__(self, key): + self._validate_discovery() + return self._objected_templates[key] + + def get(self, key, default=None): + self._validate_discovery() + return self._objected_templates.get(key, default) + + def keys(self): + return self._objected_templates.keys() + + def reset(self): + self._raw_templates = None + self._templates = None + self._objected_templates = None + + @property + def anatomy(self): + """Anatomy instance. + + Returns: + Anatomy: Anatomy instance. + + """ + return self._anatomy + + @property + def project_name(self): + """Project name. + + Returns: + Union[str, None]: Project name if set, otherwise None. + + """ + return self._anatomy.project_name + + @property + def roots(self): + """Anatomy roots object. + + Returns: + RootItem: Anatomy roots data. + + """ + return self._anatomy.roots + + @property + def templates(self): + """Templates data. + + Templates data with replaced common data. + + Returns: + dict[str, Any]: Templates data. + + """ + self._validate_discovery() + return self._templates + + @property + def frame_padding(self): + """Default frame padding. + + Returns: + int: Frame padding used by default in templates. + + """ + self._validate_discovery() + return self["frame_padding"] + + @property + def version_padding(self): + """Default version padding. + + Returns: + int: Version padding used by default in templates. + + """ + self._validate_discovery() + return self["version_padding"] + + @classmethod + def get_rootless_path_from_result(cls, result): + """Calculate rootless path from formatting result. + + Args: + result (TemplateResult): Result of StringTemplate formatting. + + Returns: + str: Rootless path if result contains one of anatomy roots. + """ + + used_values = result.used_values + missing_keys = result.missing_keys + template = result.template + invalid_types = result.invalid_types + if ( + "root" not in used_values + or "root" in missing_keys + or "{root" not in template + ): + return + + for invalid_type in invalid_types: + if "root" in invalid_type: + return + + root_keys = cls._dict_to_subkeys_list({"root": used_values["root"]}) + if not root_keys: + return + + output = str(result) + for used_root_keys in root_keys: + if not used_root_keys: + continue + + used_value = used_values + root_key = None + for key in used_root_keys: + used_value = used_value[key] + if root_key is None: + root_key = key + else: + root_key += "[{}]".format(key) + + root_key = "{" + root_key + "}" + output = output.replace(str(used_value), root_key) + + return output + + def format(self, data, strict=True): + """Fill all templates based on entered data. + + Args: + data (dict[str, Any]): Fill data used for template formatting. + strict (Optional[bool]): Raise exception is accessed value is + not fully filled. 
+
+        Returns:
+            TemplatesResultDict: Filled templates. When `strict` is set to
+                False, accessing unfilled keys in templates won't raise
+                any exceptions.
+
+        """
+        self._validate_discovery()
+        copy_data = copy.deepcopy(data)
+        roots = self._anatomy.roots
+        if roots:
+            copy_data["root"] = roots
+
+        return self._solve_dict(copy_data, strict)
+
+    def format_all(self, in_data):
+        """Fill all templates based on entered data.
+
+        Deprecated:
+            Use `format` method with `strict=False` instead.
+
+        Args:
+            in_data (dict): Containing keys to be filled into template.
+
+        Returns:
+            TemplatesResultDict: Output `TemplateResult` values have `strict`
+                attribute set to False so accessing unfilled keys in
+                templates won't raise any exceptions.
+
+        """
+        return self.format(in_data, strict=False)
+
+    def get_template_item(
+        self, category_name, template_name, subkey=None, default=_PLACEHOLDER
+    ):
+        """Get template item from category.
+
+        Args:
+            category_name (str): Category name.
+            template_name (str): Template name.
+            subkey (Optional[str]): Subkey name.
+            default (Any): Default value if template is not found.
+
+        Returns:
+            Any: Template item or subkey value.
+
+        Raises:
+            KeyError: When any passed key is not available. The error is
+                not raised if 'default' is provided.
+
+        """
+        self._validate_discovery()
+        category = self.get(category_name)
+        if category is None:
+            if default is not _PLACEHOLDER:
+                return default
+            raise KeyError("Category '{}' not found.".format(category_name))
+
+        template_item = category.get(template_name)
+        if template_item is None:
+            if default is not _PLACEHOLDER:
+                return default
+            raise KeyError(
+                "Template '{}' not found in category '{}'.".format(
+                    template_name, category_name
+                )
+            )
+
+        if subkey is None:
+            return template_item
+
+        item = template_item.get(subkey)
+        if item is not None:
+            return item
+
+        if default is not _PLACEHOLDER:
+            return default
+        raise KeyError(
+            "Subkey '{}' not found in '{}/{}'.".format(
+                subkey, category_name, template_name
+            )
+        )
+
+    def _solve_dict(self, data, strict):
+        """Solve templates with entered data.
+
+        Args:
+            data (dict): Containing keys to be filled into template.
+
+        Returns:
+            dict: With `TemplateResult` in values containing filled or
+                partially filled templates.
+
+        """
+        output = {}
+        for key, value in self._objected_templates.items():
+            if isinstance(value, TemplateCategory):
+                value = value.format(data, strict)
+            elif isinstance(value, AnatomyStringTemplate):
+                value = value.format(data)
+            output[key] = value
+        return TemplatesResultDict(output, strict=strict)
+
+    def _validate_discovery(self):
+        """Validate if templates are discovered and loaded for the project.
+
+        When the project changes, the cached data are reset and discovered
+        again.
+        """
+        if self.project_name != self._loaded_project:
+            self.reset()
+
+        if self._templates is None:
+            self._discover()
+            self._loaded_project = self.project_name
+
+    def _create_objected_templates(self, templates):
+        """Create objected templates from templates data.
+
+        Args:
+            templates (dict[str, Any]): Templates data from project entity.
+
+        Returns:
+            dict[str, Any]: Values are converted to template objects.
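+
+        Example (minimal sketch; the 'work' category content is
+            illustrative)::
+
+            objected = self._create_objected_templates({
+                "work": {"default": {"file": "{project[code]}_v{version}"}}
+            })
+            # objected["work"] is a TemplateCategory instance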
+ + """ + objected_templates = {} + for category_name, category_value in copy.deepcopy(templates).items(): + if isinstance(category_value, dict): + category_value = TemplateCategory( + self, category_name, category_value + ) + elif isinstance(category_value, str): + category_value = AnatomyStringTemplate(self, category_value) + objected_templates[category_name] = category_value + return objected_templates + + def _discover(self): + """Load and cache templates from project entity.""" + if self.project_name is None: + raise ProjectNotSet("Anatomy project is not set.") + + templates = self.anatomy["templates"] + self._raw_templates = copy.deepcopy(templates) + + templates = copy.deepcopy(templates) + # Make sure all the keys are available + for key in ( + "publish", + "hero", + "work", + "delivery", + "staging", + "others", + ): + templates.setdefault(key, {}) + + solved_templates = self._solve_template_inner_links(templates) + self._templates = solved_templates + self._objected_templates = self._create_objected_templates( + solved_templates + ) + + @classmethod + def _replace_inner_keys(cls, matches, value, key_values, key): + """Replacement of inner keys in template values.""" + for match in matches: + anatomy_sub_keys = ( + cls.inner_key_name_pattern.findall(match) + ) + if key in anatomy_sub_keys: + raise ValueError(( + "Unsolvable recursion in inner keys, " + "key: \"{}\" is in his own value." + " Can't determine source, please check Anatomy templates." + ).format(key)) + + for anatomy_sub_key in anatomy_sub_keys: + replace_value = key_values.get(anatomy_sub_key) + if replace_value is None: + raise KeyError(( + "Anatomy templates can't be filled." + " Anatomy key `{0}` has" + " invalid inner key `{1}`." + ).format(key, anatomy_sub_key)) + + if not ( + isinstance(replace_value, numbers.Number) + or isinstance(replace_value, str) + ): + raise ValueError(( + "Anatomy templates can't be filled." + " Anatomy key `{0}` has" + " invalid inner key `{1}`" + " with value `{2}`." + ).format(key, anatomy_sub_key, str(replace_value))) + + value = value.replace(match, str(replace_value)) + + return value + + @classmethod + def _prepare_inner_keys(cls, key_values): + """Check values of inner keys. + + Check if inner key exist in template group and has valid value. + It is also required to avoid infinite loop with unsolvable recursion + when first inner key's value refers to second inner key's value where + first is used. + """ + keys_to_solve = set(key_values.keys()) + while True: + found = False + for key in tuple(keys_to_solve): + value = key_values[key] + + if isinstance(value, str): + matches = cls.inner_key_pattern.findall(value) + if not matches: + keys_to_solve.remove(key) + continue + + found = True + key_values[key] = cls._replace_inner_keys( + matches, value, key_values, key + ) + continue + + elif not isinstance(value, dict): + keys_to_solve.remove(key) + continue + + subdict_found = False + for _key, _value in tuple(value.items()): + matches = cls.inner_key_pattern.findall(_value) + if not matches: + continue + + subdict_found = True + found = True + key_values[key][_key] = cls._replace_inner_keys( + matches, _value, key_values, + "{}.{}".format(key, _key) + ) + + if not subdict_found: + keys_to_solve.remove(key) + + if not found: + break + + return key_values + + @classmethod + def _solve_template_inner_links(cls, templates): + """Solve templates inner keys identified by "{@*}". + + Process is split into 2 parts. 
+        First part collects all global keys (keys in the top hierarchy where
+        the value is not a dictionary). All global keys are set for all group
+        keys (keys in the top hierarchy where the value is a dictionary).
+        The value of a key is not overridden in a group that already
+        contains a value for the key.
+
+        In the second part, every "{@key}" reference in a value is replaced
+        with the value of the referenced key from the group.
+
+        Args:
+            templates (dict): Raw templates data.
+
+        Example:
+            templates::
+                key_1: "value_1",
+                key_2: "{@key_1}/{filling_key}"
+
+                group_1:
+                    key_3: "value_3/{@key_2}"
+
+                group_2:
+                    key_2: "value_2"
+                    key_4: "value_4/{@key_2}"
+
+            output::
+                key_1: "value_1"
+                key_2: "value_1/{filling_key}"
+
+                group_1:
+                    key_1: "value_1"
+                    key_2: "value_1/{filling_key}"
+                    key_3: "value_3/value_1/{filling_key}"
+
+                group_2:
+                    key_1: "value_1"
+                    key_2: "value_2"
+                    key_4: "value_4/value_2"
+
+        Returns:
+            dict[str, Any]: Solved templates data.
+
+        """
+        default_key_values = templates.pop("common", {})
+        output = {}
+        for category_name, category_value in templates.items():
+            new_category_value = {}
+            for key, value in category_value.items():
+                key_values = copy.deepcopy(default_key_values)
+                key_values.update(value)
+                new_category_value[key] = cls._prepare_inner_keys(key_values)
+            output[category_name] = new_category_value
+
+        default_keys_by_subkeys = cls._prepare_inner_keys(default_key_values)
+        for key, value in default_keys_by_subkeys.items():
+            output[key] = value
+
+        return output
+
+    @classmethod
+    def _dict_to_subkeys_list(cls, subdict):
+        """Convert dictionary to list of subkeys.
+
+        Example::
+
+            _dict_to_subkeys_list({
+                "root": {
+                    "work": "path/to/work",
+                    "publish": "path/to/publish"
+                }
+            })
+            [
+                ["root", "work"],
+                ["root", "publish"]
+            ]
+
+        Args:
+            subdict (dict[str, Any]): Dictionary to be converted.
+
+        Returns:
+            list[list[str]]: List of subkeys.
+ + """ + output = [] + subkey_queue = collections.deque() + subkey_queue.append((subdict, [])) + while subkey_queue: + queue_item = subkey_queue.popleft() + data, pre_keys = queue_item + for key, value in data.items(): + result = list(pre_keys) + result.append(key) + if isinstance(value, dict): + subkey_queue.append((value, result)) + else: + output.append(result) + return output diff --git a/client/ayon_core/pipeline/colorspace.py b/client/ayon_core/pipeline/colorspace.py index 7100984217..efa3bbf968 100644 --- a/client/ayon_core/pipeline/colorspace.py +++ b/client/ayon_core/pipeline/colorspace.py @@ -23,7 +23,7 @@ log = Logger.get_logger(__name__) class CachedData: - remapping = None + remapping = {} has_compatible_ocio_package = None config_version_data = {} ocio_config_colorspaces = {} @@ -774,8 +774,8 @@ def get_imageio_config( if not anatomy_data: from ayon_core.pipeline.context_tools import ( - get_template_data_from_session) - anatomy_data = get_template_data_from_session() + get_current_context_template_data) + anatomy_data = get_current_context_template_data() formatting_data = deepcopy(anatomy_data) diff --git a/client/ayon_core/pipeline/context_tools.py b/client/ayon_core/pipeline/context_tools.py index 86b3d770b4..33567d7280 100644 --- a/client/ayon_core/pipeline/context_tools.py +++ b/client/ayon_core/pipeline/context_tools.py @@ -1,26 +1,17 @@ """Core pipeline functionality""" import os -import types import logging import platform import uuid +import ayon_api import pyblish.api from pyblish.lib import MessageHandler from ayon_core import AYON_CORE_ROOT from ayon_core.host import HostBase -from ayon_core.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, - version_is_latest, - get_asset_name_identifier, - get_ayon_server_api_connection, -) -from ayon_core.lib import is_in_tests -from ayon_core.lib.events import emit_event +from ayon_core.lib import is_in_tests, initialize_ayon_connection, emit_event from ayon_core.addon import load_addons, AddonsManager from ayon_core.settings import get_project_settings @@ -29,7 +20,6 @@ from .anatomy import Anatomy from .template_data import get_template_data_with_names from .workfile import ( get_workdir, - get_workfile_template_key, get_custom_workfile_template_by_string_context, ) from . import ( @@ -105,15 +95,15 @@ def install_host(host): """Install `host` into the running Python session. Args: - host (module): A Python module containing the Avalon - avalon host-interface. + host (HostBase): A host interface object. + """ global _is_installed _is_installed = True # Make sure global AYON connection has set site id and version - get_ayon_server_api_connection() + initialize_ayon_connection() addons_manager = _get_addons_manager() @@ -162,7 +152,17 @@ def install_host(host): def install_ayon_plugins(project_name=None, host_name=None): - # Make sure modules are loaded + """Install AYON core plugins and make sure the core is initialized. + + Args: + project_name (Optional[str]): Name of project to install plugins for. + host_name (Optional[str]): Name of host to install plugins for. 
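+
+    Example (illustrative values)::
+
+        install_ayon_plugins(project_name="MyProject", host_name="maya")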
+ + """ + # Make sure global AYON connection has set site id and version + # - this is necessary if 'install_host' is not called + initialize_ayon_connection() + # Make sure addons are loaded load_addons() log.info("Registering global plug-ins..") @@ -228,6 +228,12 @@ def install_ayon_plugins(project_name=None, host_name=None): def install_openpype_plugins(project_name=None, host_name=None): + """Install AYON core plugins and make sure the core is initialized. + + Deprecated: + Use `install_ayon_plugins` instead. + + """ install_ayon_plugins(project_name, host_name) @@ -286,47 +292,6 @@ def deregister_host(): _registered_host["_"] = None -def debug_host(): - """A debug host, useful to debugging features that depend on a host""" - - host = types.ModuleType("debugHost") - - def ls(): - containers = [ - { - "representation": "ee-ft-a-uuid1", - "schema": "openpype:container-1.0", - "name": "Bruce01", - "objectName": "Bruce01_node", - "namespace": "_bruce01_", - "version": 3, - }, - { - "representation": "aa-bc-s-uuid2", - "schema": "openpype:container-1.0", - "name": "Bruce02", - "objectName": "Bruce01_node", - "namespace": "_bruce02_", - "version": 2, - } - ] - - for container in containers: - yield container - - host.__dict__.update({ - "ls": ls, - "open_file": lambda fname: None, - "save_file": lambda fname: None, - "current_file": lambda: os.path.expanduser("~/temp.txt"), - "has_unsaved_changes": lambda: False, - "work_root": lambda: os.path.expanduser("~/temp"), - "file_extensions": lambda: ["txt"], - }) - - return host - - def get_current_host_name(): """Current host name. @@ -352,7 +317,8 @@ def get_global_context(): Use 'get_current_context' to make sure you'll get current host integration context info. - Example: + Example:: + { "project_name": "Commercial", "folder_path": "Bunny", @@ -385,10 +351,10 @@ def get_current_project_name(): return get_global_context()["project_name"] -def get_current_asset_name(): +def get_current_folder_path(): host = registered_host() if isinstance(host, HostBase): - return host.get_current_asset_name() + return host.get_current_folder_path() return get_global_context()["folder_path"] @@ -399,51 +365,82 @@ def get_current_task_name(): return get_global_context()["task_name"] -def get_current_project(fields=None): +def get_current_project_entity(fields=None): """Helper function to get project document based on global Session. This function should be called only in process where host is installed. + Args: + fields (Optional[Iterable[str]]): Limit returned data of project + entity. + Returns: - dict: Project document. - None: Project is not set. + Union[dict[str, Any], None]: Project entity of current project or None. + """ - project_name = get_current_project_name() - return get_project(project_name, fields=fields) + return ayon_api.get_project(project_name, fields=fields) -def get_current_project_asset(asset_name=None, asset_id=None, fields=None): - """Helper function to get asset document based on global Session. +def get_current_folder_entity(fields=None): + """Helper function to get folder entity based on current context. This function should be called only in process where host is installed. - Asset is found out based on passed asset name or id (not both). Asset name - is not used for filtering if asset id is passed. When both asset name and - id are missing then asset name from current process is used. + Folder is based on current context project name and folder path. Args: - asset_name (str): Name of asset used for filter. 
- asset_id (Union[str, ObjectId]): Asset document id. If entered then - is used as only filter. - fields (Union[List[str], None]): Limit returned data of asset documents + fields (Optional[Iterable[str]]): Limit returned data of folder entity to specific keys. Returns: - dict: Asset document. - None: Asset is not set or not exist. + Union[dict[str, Any], None]: Folder entity or None. + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] - project_name = get_current_project_name() - if asset_id: - return get_asset_by_id(project_name, asset_id, fields=fields) + # Skip if is not set even on context + if not project_name or not folder_path: + return None + return ayon_api.get_folder_by_path( + project_name, folder_path, fields=fields + ) - if not asset_name: - asset_name = get_current_asset_name() - # Skip if is not set even on context - if not asset_name: - return None - return get_asset_by_name(project_name, asset_name, fields=fields) + +def get_current_task_entity(fields=None): + """Helper function to get task entity based on current context. + + This function should be called only in process where host is installed. + + Task is based on current context project name, folder path + and task name. + + Args: + fields (Optional[Iterable[str]]): Limit returned data of task entity + to specific keys. + + Returns: + Union[dict[str, Any], None]: Task entity or None. + + """ + context = get_current_context() + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + + # Skip if is not set even on context + if not project_name or not folder_path or not task_name: + return None + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + if not folder_entity: + return None + return ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name, fields=fields + ) def is_representation_from_latest(representation): @@ -457,7 +454,9 @@ def is_representation_from_latest(representation): """ project_name = get_current_project_name() - return version_is_latest(project_name, representation["parent"]) + return ayon_api.version_is_latest( + project_name, representation["versionId"] + ) def get_template_data_from_session(session=None, settings=None): @@ -475,18 +474,18 @@ def get_template_data_from_session(session=None, settings=None): if session is not None: project_name = session["AYON_PROJECT_NAME"] - asset_name = session["AYON_FOLDER_PATH"] + folder_path = session["AYON_FOLDER_PATH"] task_name = session["AYON_TASK_NAME"] host_name = session["AYON_HOST_NAME"] else: context = get_current_context() project_name = context["project_name"] - asset_name = context["folder_path"] + folder_path = context["folder_path"] task_name = context["task_name"] host_name = get_current_host_name() return get_template_data_with_names( - project_name, asset_name, task_name, host_name, settings + project_name, folder_path, task_name, host_name, settings ) @@ -503,98 +502,46 @@ def get_current_context_template_data(settings=None): context = get_current_context() project_name = context["project_name"] - asset_name = context["folder_path"] + folder_path = context["folder_path"] task_name = context["task_name"] host_name = get_current_host_name() return get_template_data_with_names( - project_name, asset_name, task_name, host_name, settings + project_name, folder_path, task_name, host_name, settings ) -def get_workdir_from_session(session=None, template_key=None): - 
"""Template data for template fill from session keys. - - Args: - session (Union[Dict[str, str], None]): The Session to use. If not - provided use the currently active global Session. - template_key (str): Prepared template key from which workdir is - calculated. - - Returns: - str: Workdir path. - """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - project_name = get_current_project_name() - host_name = get_current_host_name() - template_data = get_template_data_from_session(session) - - if not template_key: - task_type = template_data["task"]["type"] - template_key = get_workfile_template_key( - task_type, - host_name, - project_name=project_name - ) - - anatomy = Anatomy(project_name) - template_obj = anatomy.templates_obj[template_key]["folder"] - path = template_obj.format_strict(template_data) - if path: - path = os.path.normpath(path) - return path - - -def get_custom_workfile_template_from_session( - session=None, project_settings=None -): +def get_current_context_custom_workfile_template(project_settings=None): """Filter and fill workfile template profiles by current context. - This function cab be used only inside host where context is set. + This function can be used only inside host where current context is set. Args: - session (Optional[Dict[str, str]]): Session from which are taken - data. - project_settings(Optional[Dict[str, Any]]): Project settings. + project_settings (Optional[dict[str, Any]]): Project settings Returns: str: Path to template or None if none of profiles match current context. (Existence of formatted path is not validated.) + """ - - if session is not None: - project_name = session["AYON_PROJECT_NAME"] - asset_name = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] - host_name = session["AYON_HOST_NAME"] - else: - context = get_current_context() - project_name = context["project_name"] - asset_name = context["folder_path"] - task_name = context["task_name"] - host_name = get_current_host_name() - + context = get_current_context() return get_custom_workfile_template_by_string_context( - project_name, - asset_name, - task_name, - host_name, + context["project_name"], + context["folder_path"], + context["task_name"], + get_current_host_name(), project_settings=project_settings ) -def change_current_context(asset_doc, task_name, template_key=None): +def change_current_context(folder_entity, task_entity, template_key=None): """Update active Session to a new task work area. - This updates the live Session to a different task under asset. + This updates the live Session to a different task under folder. Args: - asset_doc (Dict[str, Any]): The asset document to set. - task_name (str): The task to set under asset. + folder_entity (Dict[str, Any]): Folder entity to set. + task_entity (Dict[str, Any]): Task entity to set. template_key (Union[str, None]): Prepared template key to be used for workfile template in Anatomy. 
@@ -604,18 +551,22 @@ def change_current_context(asset_doc, task_name, template_key=None): project_name = get_current_project_name() workdir = None - if asset_doc: - project_doc = get_project(project_name) + folder_path = None + task_name = None + if folder_entity: + folder_path = folder_entity["path"] + if task_entity: + task_name = task_entity["name"] + project_entity = ayon_api.get_project(project_name) host_name = get_current_host_name() workdir = get_workdir( - project_doc, - asset_doc, - task_name, + project_entity, + folder_entity, + task_entity, host_name, template_key=template_key ) - folder_path = get_asset_name_identifier(asset_doc) envs = { "AYON_PROJECT_NAME": project_name, "AYON_FOLDER_PATH": folder_path, @@ -635,7 +586,7 @@ def change_current_context(asset_doc, task_name, template_key=None): # Convert env keys to human readable keys data["project_name"] = project_name - data["folder_path"] = get_asset_name_identifier(asset_doc) + data["folder_path"] = folder_path data["task_name"] = task_name data["workdir_path"] = workdir diff --git a/client/ayon_core/pipeline/create/README.md b/client/ayon_core/pipeline/create/README.md index bbfd1bfa0f..09d3a22222 100644 --- a/client/ayon_core/pipeline/create/README.md +++ b/client/ayon_core/pipeline/create/README.md @@ -8,7 +8,7 @@ Discovers Creator plugins to be able create new instances and convert existing i Publish plugins are loaded because they can also define attributes definitions. These are less product type specific To be able define attributes Publish plugin must inherit from `AYONPyblishPluginMixin` and must override `get_attribute_defs` class method which must return list of attribute definitions. Values of publish plugin definitions are stored per plugin name under `publish_attributes`. Also can override `convert_attribute_values` class method which gives ability to modify values on instance before are used in CreatedInstance. Method `convert_attribute_values` can be also used without `get_attribute_defs` to modify values when changing compatibility (remove metadata from instance because are irrelevant). -Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`. +Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`. Except creating and removing instances are all changes not automatically propagated to host context (scene/workfile/...) to propagate changes call `save_changes` which trigger update of all instances in context using Creators implementation. diff --git a/client/ayon_core/pipeline/create/context.py b/client/ayon_core/pipeline/create/context.py index 425de4305f..b8618738fb 100644 --- a/client/ayon_core/pipeline/create/context.py +++ b/client/ayon_core/pipeline/create/context.py @@ -10,12 +10,8 @@ from contextlib import contextmanager import pyblish.logic import pyblish.api +import ayon_api -from ayon_core.client import ( - get_assets, - get_asset_by_name, - get_asset_name_identifier, -) from ayon_core.settings import get_project_settings from ayon_core.lib.attribute_definitions import ( UnknownDef, @@ -533,7 +529,7 @@ class AttributeValues(object): Has dictionary like methods. Not all of them are allowed all the time. Args: - attr_defs(AbstractAttrDef): Defintions of value type and properties. + attr_defs(AbstractAttrDef): Definitions of value type and properties. values(dict): Values after possible conversion. origin_data(dict): Values loaded from host before conversion. 
""" @@ -854,7 +850,7 @@ class CreatedInstance: """Instance entity with data that will be stored to workfile. I think `data` must be required argument containing all minimum information - about instance like "asset" and "task" and all data used for filling + about instance like "folderPath" and "task" and all data used for filling product name as creators may have custom data for product name filling. Notes: @@ -982,7 +978,7 @@ class CreatedInstance: if not self._data.get("instance_id"): self._data["instance_id"] = str(uuid4()) - self._asset_is_valid = self.has_set_asset + self._folder_is_valid = self.has_set_folder self._task_is_valid = self.has_set_task def __str__(self): @@ -1283,8 +1279,8 @@ class CreatedInstance: # Context validation related methods/properties @property - def has_set_asset(self): - """Asset name is set in data.""" + def has_set_folder(self): + """Folder path is set in data.""" return "folderPath" in self._data @@ -1298,15 +1294,15 @@ class CreatedInstance: def has_valid_context(self): """Context data are valid for publishing.""" - return self.has_valid_asset and self.has_valid_task + return self.has_valid_folder and self.has_valid_task @property - def has_valid_asset(self): - """Asset set in context exists in project.""" + def has_valid_folder(self): + """Folder set in context exists in project.""" - if not self.has_set_asset: + if not self.has_set_folder: return False - return self._asset_is_valid + return self._folder_is_valid @property def has_valid_task(self): @@ -1316,9 +1312,9 @@ class CreatedInstance: return False return self._task_is_valid - def set_asset_invalid(self, invalid): - # TODO replace with `set_asset_name` - self._asset_is_valid = not invalid + def set_folder_invalid(self, invalid): + # TODO replace with `set_folder_path` + self._folder_is_valid = not invalid def set_task_invalid(self, invalid): # TODO replace with `set_task_name` @@ -1402,7 +1398,7 @@ class CreateContext: ).format(joined_methods)) self._current_project_name = None - self._current_asset_name = None + self._current_folder_path = None self._current_task_name = None self._current_workfile_path = None @@ -1557,14 +1553,14 @@ class CreateContext: return self._current_project_name - def get_current_asset_name(self): - """Asset name which was used as current context on context reset. + def get_current_folder_path(self): + """Folder path which was used as current context on context reset. Returns: - Union[str, None]: Asset name. + Union[str, None]: Folder path. """ - return self._current_asset_name + return self._current_folder_path def get_current_task_name(self): """Task name which was used as current context on context reset. @@ -1600,19 +1596,19 @@ class CreateContext: def context_has_changed(self): """Host context has changed. - As context is used project, asset, task name and workfile path if + As context is used project, folder, task name and workfile path if host does support workfiles. Returns: bool: Context changed. 
""" - project_name, asset_name, task_name, workfile_path = ( + project_name, folder_path, task_name, workfile_path = ( self._get_current_host_context() ) return ( self._current_project_name != project_name - or self._current_asset_name != asset_name + or self._current_folder_path != folder_path or self._current_task_name != task_name or self._current_workfile_path != workfile_path ) @@ -1683,18 +1679,18 @@ class CreateContext: self.refresh_thumbnails() def _get_current_host_context(self): - project_name = asset_name = task_name = workfile_path = None + project_name = folder_path = task_name = workfile_path = None if hasattr(self.host, "get_current_context"): host_context = self.host.get_current_context() if host_context: project_name = host_context.get("project_name") - asset_name = host_context.get("folder_path") + folder_path = host_context.get("folder_path") task_name = host_context.get("task_name") if isinstance(self.host, IWorkfileHost): workfile_path = self.host.get_current_workfile() - return project_name, asset_name, task_name, workfile_path + return project_name, folder_path, task_name, workfile_path def reset_current_context(self): """Refresh current context. @@ -1713,12 +1709,12 @@ class CreateContext: are stored. We should store the workfile (if is available) too. """ - project_name, asset_name, task_name, workfile_path = ( + project_name, folder_path, task_name, workfile_path = ( self._get_current_host_context() ) self._current_project_name = project_name - self._current_asset_name = asset_name + self._current_folder_path = folder_path self._current_task_name = task_name self._current_workfile_path = workfile_path @@ -1794,10 +1790,10 @@ class CreateContext: creator_identifier = creator_class.identifier if creator_identifier in creators: - self.log.warning(( - "Duplicated Creator identifier. " - "Using first and skipping following" - )) + self.log.warning( + "Duplicate Creator identifier: '%s'. Using first Creator " + "and skipping: %s", creator_identifier, creator_class + ) continue # Filter by host name @@ -1950,45 +1946,54 @@ class CreateContext: self, creator_identifier, variant, - asset_doc=None, - task_name=None, + folder_entity=None, + task_entity=None, pre_create_data=None ): """Trigger create of plugins with standartized arguments. - Arguments 'asset_doc' and 'task_name' use current context as default - values. If only 'task_name' is provided it will be overriden by - task name from current context. If 'task_name' is not provided - when 'asset_doc' is, it is considered that task name is not specified, - which can lead to error if product name template requires task name. + Arguments 'folder_entity' and 'task_name' use current context as + default values. If only 'task_entity' is provided it will be + overridden by task name from current context. If 'task_name' is not + provided when 'folder_entity' is, it is considered that task name is + not specified, which can lead to error if product name template + requires task name. Args: creator_identifier (str): Identifier of creator plugin. variant (str): Variant used for product name. - asset_doc (Dict[str, Any]): Asset document which define context of - creation (possible context of created instance/s). - task_name (str): Name of task to which is context related. + folder_entity (Dict[str, Any]): Folder entity which define context + of creation (possible context of created instance/s). + task_entity (Dict[str, Any]): Task entity. pre_create_data (Dict[str, Any]): Pre-create attribute values. 
        Returns:
            Any: Output of triggered creator's 'create' method.

        Raises:
-            CreatorError: If creator was not found or asset is empty.
+            CreatorError: If creator was not found or folder is empty.

        """

        creator = self._get_creator_in_create(creator_identifier)

        project_name = self.project_name
-        if asset_doc is None:
-            asset_name = self.get_current_asset_name()
-            asset_doc = get_asset_by_name(project_name, asset_name)
-            task_name = self.get_current_task_name()
-            if asset_doc is None:
+        if folder_entity is None:
+            folder_path = self.get_current_folder_path()
+            folder_entity = ayon_api.get_folder_by_path(
+                project_name, folder_path
+            )
+            if folder_entity is None:
                raise CreatorError(
-                    "Asset with name {} was not found".format(asset_name)
+                    "Folder '{}' was not found".format(folder_path)
                )

+        task_name = None
+        if task_entity is None:
+            task_name = self.get_current_task_name()
+            task_entity = ayon_api.get_task_by_name(
+                project_name, folder_entity["id"], task_name
+            )
+        if task_entity is not None:
+            task_name = task_entity["name"]
+
        if pre_create_data is None:
            pre_create_data = {}
@@ -2005,15 +2010,14 @@ class CreateContext:

        product_name = creator.get_product_name(
            project_name,
-            asset_doc,
-            task_name,
+            folder_entity,
+            task_entity,
            variant,
            self.host_name,
        )

-        asset_name = get_asset_name_identifier(asset_doc)
        instance_data = {
-            "folderPath": asset_name,
+            "folderPath": folder_entity["path"],
            "task": task_name,
            "productType": creator.product_type,
            "variant": variant
@@ -2226,7 +2230,7 @@ class CreateContext:
        raise CreatorsCreateFailed(failed_info)

    def validate_instances_context(self, instances=None):
-        """Validate 'asset' and 'task' instance context."""
+        """Validate 'folder' and 'task' instance context."""
        # Use all instances from context if 'instances' are not passed
        if instances is None:
            instances = tuple(self._instances_by_id.values())

        if not instances:
            return

-        task_names_by_asset_name = {}
+        project_name = self.project_name
+
+        task_names_by_folder_path = {}
        for instance in instances:
-            asset_name = instance.get("folderPath")
+            folder_path = instance.get("folderPath")
            task_name = instance.get("task")
-            if asset_name:
-                task_names_by_asset_name[asset_name] = set()
+            if folder_path:
+                task_names_by_folder_path[folder_path] = set()
                if task_name:
-                    task_names_by_asset_name[asset_name].add(task_name)
+                    task_names_by_folder_path[folder_path].add(task_name)

-        asset_names = {
-            asset_name
-            for asset_name in task_names_by_asset_name.keys()
-            if asset_name is not None
-        }
-        asset_docs = list(get_assets(
-            self.project_name,
-            asset_names=asset_names,
-            fields={"name", "data.tasks", "data.parents"}
-        ))
+        # Backwards compatibility for cases where folder name is set instead
+        # of folder path
+        folder_names = set()
+        folder_paths = set()
+        for folder_path in task_names_by_folder_path.keys():
+            if folder_path is None:
+                pass
+            elif "/" in folder_path:
+                folder_paths.add(folder_path)
+            else:
+                folder_names.add(folder_path)

-        task_names_by_asset_name = {}
-        asset_docs_by_name = collections.defaultdict(list)
-        for asset_doc in asset_docs:
-            asset_name = get_asset_name_identifier(asset_doc)
-            tasks = asset_doc.get("data", {}).get("tasks") or {}
-            task_names_by_asset_name[asset_name] = set(tasks.keys())
-            asset_docs_by_name[asset_doc["name"]].append(asset_doc)
+        folder_paths_by_id = {}
+        if folder_paths:
+            for folder_entity in ayon_api.get_folders(
+                project_name,
+                folder_paths=folder_paths,
+                fields={"id", "path"}
+            ):
+                folder_id = folder_entity["id"]
+                folder_paths_by_id[folder_id] = folder_entity["path"]
+
+        folder_entities_by_name = collections.defaultdict(list)
+        if folder_names:
+            for folder_entity in ayon_api.get_folders(
+                project_name,
+                folder_names=folder_names,
+                fields={"id", "name", "path"}
+            ):
+                folder_id = folder_entity["id"]
+                folder_name = folder_entity["name"]
+                folder_paths_by_id[folder_id] = folder_entity["path"]
+                folder_entities_by_name[folder_name].append(folder_entity)
+
+        tasks_entities = ayon_api.get_tasks(
+            project_name,
+            folder_ids=folder_paths_by_id.keys(),
+            fields={"name", "folderId"}
+        )
+
+        task_names_by_folder_path = collections.defaultdict(set)
+        for task_entity in tasks_entities:
+            folder_id = task_entity["folderId"]
+            folder_path = folder_paths_by_id[folder_id]
+            task_names_by_folder_path[folder_path].add(task_entity["name"])

        for instance in instances:
-            if not instance.has_valid_asset or not instance.has_valid_task:
+            if not instance.has_valid_folder or not instance.has_valid_task:
                continue

-            asset_name = instance["folderPath"]
-            if asset_name and "/" not in asset_name:
-                asset_docs = asset_docs_by_name.get(asset_name)
-                if len(asset_docs) == 1:
-                    asset_name = get_asset_name_identifier(asset_docs[0])
-                    instance["folderPath"] = asset_name
+            folder_path = instance["folderPath"]
+            if folder_path and "/" not in folder_path:
+                folder_entities = folder_entities_by_name.get(folder_path)
+                if len(folder_entities) == 1:
+                    folder_path = folder_entities[0]["path"]
+                    instance["folderPath"] = folder_path

-            if asset_name not in task_names_by_asset_name:
-                instance.set_asset_invalid(True)
+            if folder_path not in task_names_by_folder_path:
+                instance.set_folder_invalid(True)
                continue

            task_name = instance["task"]
            if not task_name:
                continue

-            if task_name not in task_names_by_asset_name[asset_name]:
+            if task_name not in task_names_by_folder_path[folder_path]:
                instance.set_task_invalid(True)

    def save_changes(self):
diff --git a/client/ayon_core/pipeline/create/creator_plugins.py b/client/ayon_core/pipeline/create/creator_plugins.py
index cb8e4a2d1c..e0b30763d0 100644
--- a/client/ayon_core/pipeline/create/creator_plugins.py
+++ b/client/ayon_core/pipeline/create/creator_plugins.py
@@ -33,7 +33,7 @@ class CreatorError(Exception):


 @six.add_metaclass(ABCMeta)
-class SubsetConvertorPlugin(object):
+class ProductConvertorPlugin(object):
     """Helper for conversion of instances created using legacy creators.

-    Conversion from legacy creators would mean to loose legacy instances,
+    Conversion from legacy creators would mean to lose legacy instances,
@@ -347,7 +347,7 @@ class BaseCreator:
         Returns:
             str: Group label that can be used for grouping of instances in UI.
-                Group label can be overriden by instance itself.
+                Group label can be overridden by instance itself.

         """

         if self._cached_group_label is None:
@@ -472,8 +472,8 @@ class BaseCreator:
     def get_dynamic_data(
         self,
         project_name,
-        asset_doc,
-        task_name,
+        folder_entity,
+        task_entity,
         variant,
         host_name,
         instance
@@ -489,31 +489,21 @@ class BaseCreator:
     def get_product_name(
         self,
         project_name,
-        asset_doc,
-        task_name,
+        folder_entity,
+        task_entity,
         variant,
         host_name=None,
         instance=None
     ):
         """Return product name for passed context.

-        CHANGES:
-        Argument `asset_id` was replaced with `asset_doc`. It is easier to
-        query asset before. In some cases would this method be called multiple
-        times and it would be too slow to query asset document on each
-        callback.
-
-        NOTE:
-        Asset document is not used yet but is required if would like to use
-        task type in product templates.
-
         Method is also called on product name update. In that case origin
         instance is passed in.
Args: project_name (str): Project name. - asset_doc (dict): Asset document for which product is created. - task_name (str): For which task product is created. + folder_entity (dict): Folder entity. + task_entity (dict): Task entity. variant (str): Product name variant. In most of cases user input. host_name (Optional[str]): Which host creates product. Defaults to host name on create context. @@ -524,10 +514,16 @@ class BaseCreator: if host_name is None: host_name = self.create_context.host_name + + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] + dynamic_data = self.get_dynamic_data( project_name, - asset_doc, - task_name, + folder_entity, + task_entity, variant, host_name, instance @@ -535,8 +531,8 @@ class BaseCreator: return get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, self.product_type, variant, @@ -611,18 +607,19 @@ class Creator(BaseCreator): """ # GUI Purposes - # - default_variants may not be used if `get_default_variants` is overriden + # - default_variants may not be used if `get_default_variants` + # is overridden default_variants = [] # Default variant used in 'get_default_variant' _default_variant = None # Short description of product type - # - may not be used if `get_description` is overriden + # - may not be used if `get_description` is overridden description = None # Detailed description of product type for artists - # - may not be used if `get_detail_description` is overriden + # - may not be used if `get_detail_description` is overridden detailed_description = None # It does make sense to change context on creation @@ -815,7 +812,7 @@ def discover_creator_plugins(*args, **kwargs): def discover_convertor_plugins(*args, **kwargs): - return discover(SubsetConvertorPlugin, *args, **kwargs) + return discover(ProductConvertorPlugin, *args, **kwargs) def discover_legacy_creator_plugins(): @@ -874,8 +871,8 @@ def register_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): register_plugin(LegacyCreator, plugin) - elif issubclass(plugin, SubsetConvertorPlugin): - register_plugin(SubsetConvertorPlugin, plugin) + elif issubclass(plugin, ProductConvertorPlugin): + register_plugin(ProductConvertorPlugin, plugin) def deregister_creator_plugin(plugin): @@ -885,20 +882,20 @@ def deregister_creator_plugin(plugin): elif issubclass(plugin, LegacyCreator): deregister_plugin(LegacyCreator, plugin) - elif issubclass(plugin, SubsetConvertorPlugin): - deregister_plugin(SubsetConvertorPlugin, plugin) + elif issubclass(plugin, ProductConvertorPlugin): + deregister_plugin(ProductConvertorPlugin, plugin) def register_creator_plugin_path(path): register_plugin_path(BaseCreator, path) register_plugin_path(LegacyCreator, path) - register_plugin_path(SubsetConvertorPlugin, path) + register_plugin_path(ProductConvertorPlugin, path) def deregister_creator_plugin_path(path): deregister_plugin_path(BaseCreator, path) deregister_plugin_path(LegacyCreator, path) - deregister_plugin_path(SubsetConvertorPlugin, path) + deregister_plugin_path(ProductConvertorPlugin, path) def cache_and_get_instances(creator, shared_key, list_instances_func): diff --git a/client/ayon_core/pipeline/create/legacy_create.py b/client/ayon_core/pipeline/create/legacy_create.py index 5e23a74a79..ab939343c9 100644 --- a/client/ayon_core/pipeline/create/legacy_create.py +++ b/client/ayon_core/pipeline/create/legacy_create.py @@ -9,7 +9,6 @@ import os import logging import collections -from ayon_core.client import 
get_asset_by_id from ayon_core.pipeline.constants import AVALON_INSTANCE_ID from .product_name import get_product_name @@ -45,7 +44,7 @@ class LegacyCreator(object): @classmethod def apply_settings(cls, project_settings): - """Apply OpenPype settings to a plugin class.""" + """Apply AYON settings to a plugin class.""" host_name = os.environ.get("AYON_HOST_NAME") plugin_type = "create" @@ -89,7 +88,7 @@ class LegacyCreator(object): @classmethod def get_dynamic_data( - cls, project_name, asset_id, task_name, variant, host_name + cls, project_name, folder_entity, task_entity, variant, host_name ): """Return dynamic data for current Creator plugin. @@ -124,7 +123,7 @@ class LegacyCreator(object): @classmethod def get_product_name( - cls, project_name, asset_id, task_name, variant, host_name=None + cls, project_name, folder_entity, task_entity, variant, host_name=None ): """Return product name created with entered arguments. @@ -137,8 +136,8 @@ class LegacyCreator(object): Args: project_name (str): Context's project name. - asset_id (str): Folder id. - task_name (str): Context's task name. + folder_entity (dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. variant (str): What is entered by user in creator tool. host_name (str): Name of host. @@ -148,17 +147,16 @@ class LegacyCreator(object): """ dynamic_data = cls.get_dynamic_data( - project_name, asset_id, task_name, variant, host_name + project_name, folder_entity, task_entity, variant, host_name ) - - asset_doc = get_asset_by_id( - project_name, asset_id, fields=["data.tasks"] - ) - + task_name = task_type = None + if task_entity: + task_name = task_entity["name"] + task_type = task_entity["taskType"] return get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, cls.product_type, variant, @@ -166,7 +164,9 @@ class LegacyCreator(object): ) -def legacy_create(Creator, name, asset, options=None, data=None): +def legacy_create( + Creator, product_name, folder_path, options=None, data=None +): """Create a new instance Associate nodes with a product name and type. These nodes are later @@ -178,11 +178,11 @@ def legacy_create(Creator, name, asset, options=None, data=None): and finally asset browsers to help identify the origin of the asset. Arguments: - Creator (Creator): Class of creator - name (str): Name of product - asset (str): Name of asset - options (dict, optional): Additional options from GUI - data (dict, optional): Additional data from GUI + Creator (Creator): Class of creator. + product_name (str): Name of product. + folder_path (str): Folder path. + options (dict, optional): Additional options from GUI. + data (dict, optional): Additional data from GUI. 
Raises: NameError on `productName` already exists @@ -196,7 +196,7 @@ def legacy_create(Creator, name, asset, options=None, data=None): from ayon_core.pipeline import registered_host host = registered_host() - plugin = Creator(name, asset, options, data) + plugin = Creator(product_name, folder_path, options, data) if plugin.maintain_selection is True: with host.maintained_selection(): diff --git a/client/ayon_core/pipeline/create/product_name.py b/client/ayon_core/pipeline/create/product_name.py index 8413bfa9d8..fecda867e5 100644 --- a/client/ayon_core/pipeline/create/product_name.py +++ b/client/ayon_core/pipeline/create/product_name.py @@ -1,5 +1,3 @@ -import os - from ayon_core.settings import get_project_settings from ayon_core.lib import filter_profiles, prepare_template_data @@ -33,7 +31,7 @@ def get_product_name_template( Args: project_name (str): Project on which the context lives. - product_type (str): Product type for which the subset name is + product_type (str): Product type for which the product name is calculated. host_name (str): Name of host in which the product name is calculated. task_name (str): Name of task in which context the product is created. @@ -81,8 +79,8 @@ def get_product_name_template( def get_product_name( project_name, - asset_doc, task_name, + task_type, host_name, product_type, variant, @@ -107,12 +105,11 @@ def get_product_name( Args: project_name (str): Project name. + task_name (Union[str, None]): Task name. + task_type (Union[str, None]): Task type. host_name (str): Host name. product_type (str): Product type. variant (str): In most of the cases it is user input during creation. - task_name (str): Task name on which context is instance created. - asset_doc (dict): Queried asset document with its tasks in data. - Used to get task type. default_template (Optional[str]): Default template if any profile does not match passed context. Constant 'DEFAULT_PRODUCT_TEMPLATE' is used if is not passed. 
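
# A minimal migration sketch for the 'get_product_name' signature change
# above: 'task_name' and 'task_type' are now passed explicitly instead of
# the removed 'asset_doc'. The import path and all argument values are
# illustrative assumptions.
from ayon_core.pipeline.create import get_product_name

product_name = get_product_name(
    "my_project",  # project_name
    "modeling",    # task_name
    "Modeling",    # task_type
    "maya",        # host_name
    "model",       # product_type
    "Main",        # variant
)
# With the default product name template this would resolve to "modelMain".
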
@@ -132,10 +129,6 @@ def get_product_name( if not product_type: return "" - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") - template = get_product_name_template( project_name, product_type_filter or product_type, diff --git a/client/ayon_core/pipeline/create/utils.py b/client/ayon_core/pipeline/create/utils.py index 44063bd9ac..b43741e183 100644 --- a/client/ayon_core/pipeline/create/utils.py +++ b/client/ayon_core/pipeline/create/utils.py @@ -1,11 +1,6 @@ import collections -from ayon_core.client import ( - get_assets, - get_subsets, - get_last_versions, - get_asset_name_identifier, -) +import ayon_api def get_last_versions_for_instances( @@ -54,49 +49,50 @@ def get_last_versions_for_instances( if not product_names: return output - asset_docs = get_assets( + folder_entities = ayon_api.get_folders( project_name, - asset_names=product_names_by_folder_path.keys(), - fields=["name", "_id", "data.parents"] + folder_paths=product_names_by_folder_path.keys(), + fields={"id", "path"} ) folder_paths_by_id = { - asset_doc["_id"]: get_asset_name_identifier(asset_doc) - for asset_doc in asset_docs + folder_entity["id"]: folder_entity["path"] + for folder_entity in folder_entities } if not folder_paths_by_id: return output - subset_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=folder_paths_by_id.keys(), - subset_names=product_names, - fields=["_id", "name", "parent"] + folder_ids=folder_paths_by_id.keys(), + product_names=product_names, + fields={"id", "name", "folderId"} ) - subset_docs_by_id = {} - for subset_doc in subset_docs: - # Filter subset docs by subset names under parent - folder_id = subset_doc["parent"] + product_entities_by_id = {} + for product_entity in product_entities: + # Filter product entities by names under parent + folder_id = product_entity["folderId"] + product_name = product_entity["name"] folder_path = folder_paths_by_id[folder_id] - product_name = subset_doc["name"] if product_name not in product_names_by_folder_path[folder_path]: continue - subset_docs_by_id[subset_doc["_id"]] = subset_doc + product_entities_by_id[product_entity["id"]] = product_entity - if not subset_docs_by_id: + if not product_entities_by_id: return output - last_versions_by_product_id = get_last_versions( + last_versions_by_product_id = ayon_api.get_last_versions( project_name, - subset_docs_by_id.keys(), - fields=["name", "parent"] + product_entities_by_id.keys(), + fields={"version", "productId"} ) - for subset_id, version_doc in last_versions_by_product_id.items(): - subset_doc = subset_docs_by_id[subset_id] - folder_id = subset_doc["parent"] + for product_id, version_entity in last_versions_by_product_id.items(): + product_entity = product_entities_by_id[product_id] + product_name = product_entity["name"] + folder_id = product_entity["folderId"] folder_path = folder_paths_by_id[folder_id] - _instances = instances_by_hierarchy[folder_path][subset_doc["name"]] + _instances = instances_by_hierarchy[folder_path][product_name] for instance in _instances: - output[instance.id] = version_doc["name"] + output[instance.id] = version_entity["version"] return output diff --git a/client/ayon_core/pipeline/delivery.py b/client/ayon_core/pipeline/delivery.py index cb90e67090..029775e1db 100644 --- a/client/ayon_core/pipeline/delivery.py +++ b/client/ayon_core/pipeline/delivery.py @@ -77,8 +77,10 @@ def check_destination_path( """ anatomy_data.update(datetime_data) - 
anatomy_filled = anatomy.format_all(anatomy_data) - dest_path = anatomy_filled["delivery"][template_name] + path_template = anatomy.get_template_item( + "delivery", template_name, "path" + ) + dest_path = path_template.format(anatomy_data) report_items = collections.defaultdict(list) if not dest_path.solved: @@ -143,14 +145,16 @@ def deliver_single_file( src_path = os.path.normpath(src_path.replace("\\", "/")) if not os.path.exists(src_path): - msg = "{} doesn't exist for {}".format(src_path, repre["_id"]) + msg = "{} doesn't exist for {}".format(src_path, repre["id"]) report_items["Source file was not found"].append(msg) return report_items, 0 if format_dict: anatomy_data = copy.deepcopy(anatomy_data) anatomy_data["root"] = format_dict["root"] - template_obj = anatomy.templates_obj["delivery"][template_name] + template_obj = anatomy.get_template_item( + "delivery", template_name, "path" + ) delivery_path = template_obj.format_strict(anatomy_data) # Backwards compatibility when extension contained `.` @@ -216,12 +220,13 @@ def deliver_sequence( if not hash_path_exist(src_path): msg = "{} doesn't exist for {}".format( - src_path, repre["_id"]) + src_path, repre["id"]) report_items["Source file was not found"].append(msg) return report_items, 0 - delivery_templates = anatomy.templates.get("delivery") or {} - delivery_template = delivery_templates.get(template_name) + delivery_template = anatomy.get_template_item( + "delivery", template_name, "path", default=None + ) if delivery_template is None: msg = ( "Delivery template \"{}\" in anatomy of project \"{}\"" @@ -232,7 +237,7 @@ def deliver_sequence( # Check if 'frame' key is available in template which is required # for sequence delivery - if "{frame" not in delivery_template: + if "{frame" not in delivery_template.template: msg = ( "Delivery template \"{}\" in anatomy of project \"{}\"" "does not contain '{{frame}}' key to fill. 
Delivery of sequence" @@ -277,8 +282,7 @@ def deliver_sequence( anatomy_data["frame"] = frame_indicator if format_dict: anatomy_data["root"] = format_dict["root"] - template_obj = anatomy.templates_obj["delivery"][template_name] - delivery_path = template_obj.format_strict(anatomy_data) + delivery_path = delivery_template.format_strict(anatomy_data) delivery_path = os.path.normpath(delivery_path.replace("\\", "/")) delivery_folder = os.path.dirname(delivery_path) diff --git a/client/ayon_core/pipeline/editorial.py b/client/ayon_core/pipeline/editorial.py index 564d78ea6f..84bffbe1ec 100644 --- a/client/ayon_core/pipeline/editorial.py +++ b/client/ayon_core/pipeline/editorial.py @@ -64,7 +64,7 @@ def convert_to_padded_path(path, padding): padding (int): number of padding Returns: - type: string with reformated path + type: string with reformatted path Example: convert_to_padded_path("plate.%d.exr") > plate.%04d.exr diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.py b/client/ayon_core/pipeline/farm/pyblish_functions.py index c669d95c1e..eb6f8569d9 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.py +++ b/client/ayon_core/pipeline/farm/pyblish_functions.py @@ -1,20 +1,16 @@ -import copy -import attr -import pyblish.api import os -import clique -from copy import deepcopy +import copy import re import warnings +from copy import deepcopy + +import attr +import ayon_api +import clique from ayon_core.pipeline import ( get_current_project_name, get_representation_path, - Anatomy, -) -from ayon_core.client import ( - get_last_version_by_subset_name, - get_representations ) from ayon_core.lib import Logger from ayon_core.pipeline.publish import KnownPublishError @@ -78,16 +74,19 @@ def extend_frames(folder_path, product_name, start, end): prev_end = None project_name = get_current_project_name() - version = get_last_version_by_subset_name( + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} + ) + version_entity = ayon_api.get_last_version_by_product_name( project_name, product_name, - asset_name=folder_path + folder_entity["id"] ) # Set prev start / end frames for comparison if not prev_start and not prev_end: - prev_start = version["data"]["frameStart"] - prev_end = version["data"]["frameEnd"] + prev_start = version_entity["attrib"]["frameStart"] + prev_end = version_entity["attrib"]["frameEnd"] updated_start = min(start, prev_start) updated_end = max(end, prev_end) @@ -136,7 +135,7 @@ def get_transferable_representations(instance): list of dicts: List of transferable representations. """ - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] to_transfer = [] for representation in instance.data.get("representations", []): @@ -165,7 +164,6 @@ def get_transferable_representations(instance): def create_skeleton_instance( instance, families_transfer=None, instance_transfer=None): - # type: (pyblish.api.Instance, list, dict) -> dict """Create skeleton instance from original instance data. 
    This will create dictionary containing skeleton
@@ -190,7 +188,7 @@ def create_skeleton_instance(
     context = instance.context
     data = instance.data.copy()
-    anatomy = instance.context.data["anatomy"]  # type: Anatomy
+    anatomy = instance.context.data["anatomy"]

     # get time related data from instance (or context)
     time_data = get_time_data_from_instance_or_context(instance)
@@ -619,14 +617,31 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
         aov_patterns = aov_filter

         preview = match_aov_pattern(app, aov_patterns, render_file_name)
-        # toggle preview on if multipart is on
-        if instance.data.get("multipartExr"):
-            log.debug("Adding preview tag because its multipartExr")
-            preview = True

         new_instance = deepcopy(skeleton)
         new_instance["productName"] = product_name
-        new_instance["subsetGroup"] = group_name
+        new_instance["productGroup"] = group_name
+
+        # toggle preview on if multipart is on
+        # Because we can't query the multipartExr data member of each AOV we'll
+        # need to have hardcoded rule of excluding any renders with
+        # "cryptomatte" in the file name from being a multipart EXR. This issue
+        # happens with Redshift that forces Cryptomatte renders to be separate
+        # files even when the rest of the AOVs are merged into a single EXR.
+        # There might be an edge case where the main instance has cryptomatte
+        # in the name even though it's a multipart EXR.
+        if instance.data.get("renderer") == "redshift":
+            if (
+                instance.data.get("multipartExr") and
+                "cryptomatte" not in render_file_name.lower()
+            ):
+                log.debug("Adding preview tag because it's multipartExr")
+                preview = True
+            else:
+                new_instance["multipartExr"] = False
+        elif instance.data.get("multipartExr"):
+            log.debug("Adding preview tag because it's multipartExr")
+            preview = True

         # explicitly disable review by user
         preview = preview and not do_not_add_review
@@ -692,7 +707,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
     return instances


-def get_resources(project_name, version, extension=None):
+def get_resources(project_name, version_entity, extension=None):
     """Get the files from the specific version.

-    This will return all get all files from representation.
+    This will return all files from the representation.
@@ -710,7 +725,7 @@

     Args:
         project_name (str): Name of the project.
-        version (dict): Version document.
+        version_entity (dict): Version entity.

         extension (str): extension used to filter representations.
@@ -727,13 +742,14 @@
    # there is a `context_filter` argument that won't probably work in
    # final release of AYON. SO we'll rather not use it
-    repre_docs = list(get_representations(
-        project_name, version_ids=[version["_id"]]))
+    repre_entities = list(ayon_api.get_representations(
+        project_name, version_ids={version_entity["id"]}
+    ))

     filtered = []
-    for doc in repre_docs:
-        if doc["context"]["ext"] in extensions:
-            filtered.append(doc)
+    for repre_entity in repre_entities:
+        if repre_entity["context"]["ext"] in extensions:
+            filtered.append(repre_entity)

     representation = filtered[0]
     directory = get_representation_path(representation)
@@ -749,7 +765,6 @@


 def create_skeleton_instance_cache(instance):
-    # type: (pyblish.api.Instance, list, dict) -> dict
     """Create skeleton instance from original instance data.
This will create dictionary containing skeleton @@ -769,7 +784,7 @@ def create_skeleton_instance_cache(instance): context = instance.context data = instance.data.copy() - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] # get time related data from instance (or context) time_data = get_time_data_from_instance_or_context(instance) @@ -1003,20 +1018,24 @@ def copy_extend_frames(instance, representation): start = instance.data.get("frameStart") end = instance.data.get("frameEnd") project_name = instance.context.data["project"] - anatomy = instance.context.data["anatomy"] # type: Anatomy + anatomy = instance.context.data["anatomy"] + + folder_entity = ayon_api.get_folder_by_path( + project_name, instance.data.get("folderPath") + ) # get latest version of product # this will stop if product wasn't published yet - version = get_last_version_by_subset_name( + version_entity = ayon_api.get_last_version_by_product_name( project_name, instance.data.get("productName"), - asset_name=instance.data.get("folderPath") + folder_entity["id"] ) # get its files based on extension product_resources = get_resources( - project_name, version, representation.get("ext") + project_name, version_entity, representation.get("ext") ) r_col, _ = clique.assemble(product_resources) @@ -1088,8 +1107,8 @@ def attach_instances_to_product(attach_to, instances): new_inst["productType"] = attach_instance.get("productType") new_inst["family"] = attach_instance.get("family") new_inst["append"] = True - # don't set subsetGroup if we are attaching - new_inst.pop("subsetGroup") + # don't set productGroup if we are attaching + new_inst.pop("productGroup") new_instances.append(new_inst) return new_instances diff --git a/client/ayon_core/pipeline/farm/pyblish_functions.pyi b/client/ayon_core/pipeline/farm/pyblish_functions.pyi index 16c11aa480..fe0ae57da0 100644 --- a/client/ayon_core/pipeline/farm/pyblish_functions.pyi +++ b/client/ayon_core/pipeline/farm/pyblish_functions.pyi @@ -1,6 +1,6 @@ import pyblish.api from ayon_core.pipeline import Anatomy -from typing import Tuple, Union, List +from typing import Tuple, List class TimeData: diff --git a/client/ayon_core/pipeline/farm/tools.py b/client/ayon_core/pipeline/farm/tools.py index 8ab3b87ff6..0b647340f3 100644 --- a/client/ayon_core/pipeline/farm/tools.py +++ b/client/ayon_core/pipeline/farm/tools.py @@ -54,7 +54,7 @@ def from_published_scene(instance, replace_in_path=True): template_data["comment"] = None anatomy = instance.context.data['anatomy'] - template_obj = anatomy.templates_obj["publish"]["path"] + template_obj = anatomy.get_template_item("publish", "default", "path") template_filled = template_obj.format_strict(template_data) file_path = os.path.normpath(template_filled) diff --git a/client/ayon_core/pipeline/legacy_io.py b/client/ayon_core/pipeline/legacy_io.py deleted file mode 100644 index d5b555845b..0000000000 --- a/client/ayon_core/pipeline/legacy_io.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging -from ayon_core.pipeline import get_current_project_name - -Session = {} - -log = logging.getLogger(__name__) -log.warning( - "DEPRECATION WARNING: 'legacy_io' is deprecated and will be removed in" - " future versions of ayon-core addon." - "\nReading from Session won't give you updated information and changing" - " values won't affect global state of a process." 
-) - - -def session_data_from_environment(context_keys=False): - return {} - - -def is_installed(): - return False - - -def install(): - pass - - -def uninstall(): - pass - - -def active_project(*args, **kwargs): - return get_current_project_name() - - -def current_project(*args, **kwargs): - return get_current_project_name() diff --git a/client/ayon_core/pipeline/load/__init__.py b/client/ayon_core/pipeline/load/__init__.py index ca11b26211..bdc5ece620 100644 --- a/client/ayon_core/pipeline/load/__init__.py +++ b/client/ayon_core/pipeline/load/__init__.py @@ -8,13 +8,14 @@ from .utils import ( LoaderNotFoundError, get_repres_contexts, - get_contexts_for_repre_docs, - get_subset_contexts, + get_product_contexts, get_representation_context, + get_representation_contexts, + get_representation_contexts_by_ids, load_with_repre_context, - load_with_subset_context, - load_with_subset_contexts, + load_with_product_context, + load_with_product_contexts, load_container, remove_container, @@ -41,7 +42,7 @@ from .utils import ( from .plugins import ( LoaderPlugin, - SubsetLoaderPlugin, + ProductLoaderPlugin, discover_loader_plugins, register_loader_plugin, @@ -62,13 +63,14 @@ __all__ = ( "LoaderNotFoundError", "get_repres_contexts", - "get_contexts_for_repre_docs", - "get_subset_contexts", + "get_product_contexts", "get_representation_context", + "get_representation_contexts", + "get_representation_contexts_by_ids", "load_with_repre_context", - "load_with_subset_context", - "load_with_subset_contexts", + "load_with_product_context", + "load_with_product_contexts", "load_container", "remove_container", @@ -94,7 +96,7 @@ __all__ = ( # plugins.py "LoaderPlugin", - "SubsetLoaderPlugin", + "ProductLoaderPlugin", "discover_loader_plugins", "register_loader_plugin", diff --git a/client/ayon_core/pipeline/load/plugins.py b/client/ayon_core/pipeline/load/plugins.py index 5ac8038e66..2475800cbb 100644 --- a/client/ayon_core/pipeline/load/plugins.py +++ b/client/ayon_core/pipeline/load/plugins.py @@ -2,7 +2,6 @@ import os import logging from ayon_core.settings import get_project_settings -from ayon_core.pipeline import schema from ayon_core.pipeline.plugin_discover import ( discover, register_plugin, @@ -24,8 +23,8 @@ class LoaderPlugin(list): """ - families = [] - representations = [] + product_types = set() + representations = set() extensions = {"*"} order = 0 is_multiple_contexts_compatible = False @@ -33,7 +32,7 @@ class LoaderPlugin(list): options = [] - log = logging.getLogger("SubsetLoader") + log = logging.getLogger("ProductLoader") log.propagate = True @classmethod @@ -76,11 +75,11 @@ class LoaderPlugin(list): setattr(cls, option, value) @classmethod - def has_valid_extension(cls, repre_doc): + def has_valid_extension(cls, repre_entity): """Has representation document valid extension for loader. Args: - repre_doc (dict[str, Any]): Representation document. + repre_entity (dict[str, Any]): Representation entity. 
         Returns:
             bool: Representation has valid extension
@@ -90,11 +89,11 @@
             return True

         # Get representation main file extension from 'context'
-        repre_context = repre_doc.get("context") or {}
+        repre_context = repre_entity.get("context") or {}
         ext = repre_context.get("ext")
         if not ext:
             # Legacy way how to get extensions
-            path = repre_doc.get("data", {}).get("path")
+            path = repre_entity.get("attrib", {}).get("path")
             if not path:
                 cls.log.info(
                     "Representation doesn't have known source of extension"
@@ -116,9 +115,9 @@
     def is_compatible_loader(cls, context):
         """Return whether a loader is compatible with a context.

-        On override make sure it is overriden as class or static method.
+        On override make sure it is overridden as class or static method.

-        This checks the version's families and the representation for the given
+        This checks the product type and the representation for the given
         loader plugin.

         Args:
@@ -130,49 +129,48 @@
         """

         plugin_repre_names = cls.get_representations()
-        plugin_families = cls.families
+        plugin_product_types = cls.product_types
         if (
             not plugin_repre_names
-            or not plugin_families
+            or not plugin_product_types
             or not cls.extensions
         ):
             return False

-        repre_doc = context.get("representation")
-        if not repre_doc:
+        repre_entity = context.get("representation")
+        if not repre_entity:
             return False

         plugin_repre_names = set(plugin_repre_names)
         if (
             "*" not in plugin_repre_names
-            and repre_doc["name"] not in plugin_repre_names
+            and repre_entity["name"] not in plugin_repre_names
         ):
             return False

-        if not cls.has_valid_extension(repre_doc):
+        if not cls.has_valid_extension(repre_entity):
             return False

-        plugin_families = set(plugin_families)
-        if "*" in plugin_families:
+        plugin_product_types = set(plugin_product_types)
+        if "*" in plugin_product_types:
             return True

-        subset_doc = context["subset"]
-        maj_version, _ = schema.get_schema_version(subset_doc["schema"])
-        if maj_version < 3:
-            families = context["version"]["data"].get("families")
-        else:
-            families = subset_doc["data"].get("families")
-            if families is None:
-                family = subset_doc["data"].get("family")
-                if family:
-                    families = [family]
+        product_entity = context["product"]
+        product_type = product_entity["productType"]

-        if not families:
-            return False
-        return any(family in plugin_families for family in families)
+        return product_type in plugin_product_types

     @classmethod
     def get_representations(cls):
+        """Representation names with which the plugin is compatible.
+
+        Empty set makes the plugin incompatible with any representation. To
+        allow compatibility with all representations use '{"*"}'.
+
+        Returns:
+            set[str]: Names with which the plugin is compatible.
+
+        """
         return cls.representations

     @classmethod
@@ -192,13 +190,13 @@
             raise NotImplementedError("Loader.load() must be "
                                       "implemented by subclass")

-    def update(self, container, representation):
+    def update(self, container, context):
-        """Update `container` to `representation`
+        """Update `container` to the representation in `context`.

-        Arguments:
+        Args:
             container (avalon-core:container-1.0): Container to update,
                 from `host.ls()`.
-            representation (dict): Update the container to this representation.
+            context (dict): Full context of the new representation to update
+                the container to.
""" raise NotImplementedError("Loader.update() must be " @@ -245,7 +243,7 @@ class LoaderPlugin(list): return self._fname -class SubsetLoaderPlugin(LoaderPlugin): +class ProductLoaderPlugin(LoaderPlugin): """Load product into host application Arguments: context (dict): avalon-core:context-1.0 diff --git a/client/ayon_core/pipeline/load/utils.py b/client/ayon_core/pipeline/load/utils.py index 056836d712..f3d39800cd 100644 --- a/client/ayon_core/pipeline/load/utils.py +++ b/client/ayon_core/pipeline/load/utils.py @@ -1,28 +1,13 @@ import os import platform -import copy -import getpass import logging import inspect import collections import numbers +import ayon_api + from ayon_core.host import ILoadHost -from ayon_core.client import ( - get_project, - get_assets, - get_subsets, - get_versions, - get_version_by_id, - get_last_version_by_subset_id, - get_hero_version_by_subset_id, - get_version_by_name, - get_last_versions, - get_representations, - get_representation_by_id, - get_representation_by_name, - get_representation_parents -) from ayon_core.lib import ( StringTemplate, TemplateUnsolved, @@ -97,8 +82,8 @@ def get_repres_contexts(representation_ids, project_name=None): Returns: dict: The full representation context by representation id. - keys are repre_id, value is dictionary with full documents of - asset, subset, version and representation. + keys are repre_id, value is dictionary with entities of + folder, product, version and representation. """ from ayon_core.pipeline import get_current_project_name @@ -108,91 +93,20 @@ def get_repres_contexts(representation_ids, project_name=None): if not project_name: project_name = get_current_project_name() - repre_docs = get_representations(project_name, representation_ids) - - return get_contexts_for_repre_docs(project_name, repre_docs) - - -def get_contexts_for_repre_docs(project_name, repre_docs): - contexts = {} - if not repre_docs: - return contexts - - repre_docs_by_id = {} - version_ids = set() - for repre_doc in repre_docs: - version_ids.add(repre_doc["parent"]) - repre_docs_by_id[repre_doc["_id"]] = repre_doc - - version_docs = get_versions( - project_name, version_ids, hero=True + repre_entities = ayon_api.get_representations( + project_name, representation_ids ) - version_docs_by_id = {} - hero_version_docs = [] - versions_for_hero = set() - subset_ids = set() - for version_doc in version_docs: - if version_doc["type"] == "hero_version": - hero_version_docs.append(version_doc) - versions_for_hero.add(version_doc["version_id"]) - version_docs_by_id[version_doc["_id"]] = version_doc - subset_ids.add(version_doc["parent"]) - - if versions_for_hero: - _version_docs = get_versions(project_name, versions_for_hero) - _version_data_by_id = { - version_doc["_id"]: version_doc["data"] - for version_doc in _version_docs - } - - for hero_version_doc in hero_version_docs: - hero_version_id = hero_version_doc["_id"] - version_id = hero_version_doc["version_id"] - version_data = copy.deepcopy(_version_data_by_id[version_id]) - version_docs_by_id[hero_version_id]["data"] = version_data - - subset_docs = get_subsets(project_name, subset_ids) - subset_docs_by_id = {} - asset_ids = set() - for subset_doc in subset_docs: - subset_docs_by_id[subset_doc["_id"]] = subset_doc - asset_ids.add(subset_doc["parent"]) - - asset_docs = get_assets(project_name, asset_ids) - asset_docs_by_id = { - asset_doc["_id"]: asset_doc - for asset_doc in asset_docs - } - - project_doc = get_project(project_name) - - for repre_id, repre_doc in repre_docs_by_id.items(): - 
version_doc = version_docs_by_id[repre_doc["parent"]] - subset_doc = subset_docs_by_id[version_doc["parent"]] - asset_doc = asset_docs_by_id[subset_doc["parent"]] - context = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "asset": asset_doc, - "subset": subset_doc, - "version": version_doc, - "representation": repre_doc, - } - contexts[repre_id] = context - - return contexts + return get_representation_contexts(project_name, repre_entities) -def get_subset_contexts(subset_ids, project_name=None): - """Return parenthood context for subset. +def get_product_contexts(product_ids, project_name=None): + """Return parenthood context for product. - Provides context on subset granularity - less detail than + Provides context on product granularity - less detail than 'get_repre_contexts'. Args: - subset_ids (list): The subset ids. + product_ids (list): The product ids. project_name (Optional[str]): Project name. Returns: dict: The full representation context by representation id. @@ -200,86 +114,173 @@ def get_subset_contexts(subset_ids, project_name=None): from ayon_core.pipeline import get_current_project_name contexts = {} - if not subset_ids: + if not product_ids: return contexts if not project_name: project_name = get_current_project_name() - subset_docs = get_subsets(project_name, subset_ids) - subset_docs_by_id = {} - asset_ids = set() - for subset_doc in subset_docs: - subset_docs_by_id[subset_doc["_id"]] = subset_doc - asset_ids.add(subset_doc["parent"]) + product_entities = ayon_api.get_products( + project_name, product_ids=product_ids + ) + product_entities_by_id = {} + folder_ids = set() + for product_entity in product_entities: + product_entities_by_id[product_entity["id"]] = product_entity + folder_ids.add(product_entity["folderId"]) - asset_docs = get_assets(project_name, asset_ids) - asset_docs_by_id = { - asset_doc["_id"]: asset_doc - for asset_doc in asset_docs + folder_entities_by_id = { + folder_entity["id"]: folder_entity + for folder_entity in ayon_api.get_folders( + project_name, folder_ids=folder_ids + ) } - project_doc = get_project(project_name) + project_entity = ayon_api.get_project(project_name) - for subset_id, subset_doc in subset_docs_by_id.items(): - asset_doc = asset_docs_by_id[subset_doc["parent"]] + for product_id, product_entity in product_entities_by_id.items(): + folder_entity = folder_entities_by_id[product_entity["folderId"]] context = { - "project": { - "name": project_doc["name"], - "code": project_doc["data"].get("code") - }, - "asset": asset_doc, - "subset": subset_doc + "project": project_entity, + "folder": folder_entity, + "product": product_entity } - contexts[subset_id] = context + contexts[product_id] = context return contexts -def get_representation_context(representation): +def get_representation_contexts(project_name, representation_entities): + """Parenthood context for representations. + + Function fills ``None`` if any entity was not found or could + not be queried. + + Args: + project_name (str): Project name. + representation_entities (Iterable[dict[str, Any]]): Representation + entities. + + Returns: + dict[str, dict[str, Any]]: The full representation context by + representation id. 
+ + """ + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in representation_entities + } + + if not repre_entities_by_id: + return {} + + repre_ids = set(repre_entities_by_id) + + parents_by_repre_id = ayon_api.get_representations_parents( + project_name, repre_ids + ) + output = {} + for repre_id in repre_ids: + repre_entity = repre_entities_by_id[repre_id] + ( + version_entity, + product_entity, + folder_entity, + project_entity + ) = parents_by_repre_id[repre_id] + output[repre_id] = { + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, + "representation": repre_entity, + } + return output + + +def get_representation_contexts_by_ids(project_name, representation_ids): + """Parenthood context for representations found by ids. + + Function fills ``None`` if any entity was not found or could + not be queried. + + Args: + project_name (str): Project name. + representation_ids (Iterable[str]): Representation ids. + + Returns: + dict[str, dict[str, Any]]: The full representation context by + representation id. + + """ + repre_ids = set(representation_ids) + if not repre_ids: + return {} + + # Query representation entities by id + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in ayon_api.get_representations( + project_name, repre_ids + ) + } + output = get_representation_contexts( + project_name, repre_entities_by_id.values() + ) + for repre_id in repre_ids: + if repre_id not in output: + output[repre_id] = { + "project": None, + "folder": None, + "product": None, + "version": None, + "representation": None, + } + return output + + +def get_representation_context(project_name, representation): """Return parenthood context for representation. Args: - representation (str or ObjectId or dict): The representation id - or full representation as returned by the database. + project_name (str): Project name. + representation (Union[dict[str, Any], str]): Representation entity + or representation id. Returns: - dict: The full representation context. + dict[str, dict[str, Any]]: The full representation context. + + Raises: + ValueError: When representation is invalid or parents were not found. 
+ """ - from ayon_core.pipeline import get_current_project_name - - assert representation is not None, "This is a bug" - - project_name = get_current_project_name() - if not isinstance(representation, dict): - representation = get_representation_by_id( - project_name, representation + if not representation: + raise ValueError( + "Invalid argument value {}".format(str(representation)) ) - if not representation: - raise AssertionError("Representation was not found in database") + if isinstance(representation, dict): + repre_entity = representation + repre_id = repre_entity["id"] + context = get_representation_contexts( + project_name, [repre_entity] + )[repre_id] + else: + repre_id = representation + context = get_representation_contexts_by_ids( + project_name, {repre_id} + )[repre_id] - version, subset, asset, project = get_representation_parents( - project_name, representation - ) - if not version: - raise AssertionError("Version was not found in database") - if not subset: - raise AssertionError("Subset was not found in database") - if not asset: - raise AssertionError("Asset was not found in database") - if not project: - raise AssertionError("Project was not found in database") + missing_entities = [] + for key, value in context.items(): + if value is None: + missing_entities.append(key) - context = { - "project": { - "name": project["name"], - "code": project["data"].get("code", '') - }, - "asset": asset, - "subset": subset, - "version": version, - "representation": representation, - } + if missing_entities: + raise ValueError( + "Not able to receive representation parent types: {}".format( + ", ".join(missing_entities) + ) + ) return context @@ -292,7 +293,7 @@ def load_with_repre_context( if not is_compatible_loader(Loader, repre_context): raise IncompatibleLoaderError( "Loader {} is incompatible with {}".format( - Loader.__name__, repre_context["subset"]["name"] + Loader.__name__, repre_context["product"]["name"] ) ) @@ -302,13 +303,13 @@ def load_with_repre_context( assert isinstance(options, dict), "Options must be a dictionary" - # Fallback to subset when name is None + # Fallback to product when name is None if name is None: - name = repre_context["subset"]["name"] + name = repre_context["product"]["name"] log.info( "Running '%s' on '%s'" % ( - Loader.__name__, repre_context["asset"]["name"] + Loader.__name__, repre_context["folder"]["path"] ) ) @@ -322,8 +323,8 @@ def load_with_repre_context( return loader.load(repre_context, name, namespace, options) -def load_with_subset_context( - Loader, subset_context, namespace=None, name=None, options=None, **kwargs +def load_with_product_context( + Loader, product_context, namespace=None, name=None, options=None, **kwargs ): # Ensure options is a dictionary when no explicit options provided @@ -332,21 +333,21 @@ def load_with_subset_context( assert isinstance(options, dict), "Options must be a dictionary" - # Fallback to subset when name is None + # Fallback to product when name is None if name is None: - name = subset_context["subset"]["name"] + name = product_context["product"]["name"] log.info( "Running '%s' on '%s'" % ( - Loader.__name__, subset_context["asset"]["name"] + Loader.__name__, product_context["folder"]["path"] ) ) - return Loader().load(subset_context, name, namespace, options) + return Loader().load(product_context, name, namespace, options) -def load_with_subset_contexts( - Loader, subset_contexts, namespace=None, name=None, options=None, **kwargs +def load_with_product_contexts( + Loader, product_contexts, 
namespace=None, name=None, options=None, **kwargs ): # Ensure options is a dictionary when no explicit options provided @@ -355,19 +356,21 @@ def load_with_subset_contexts( assert isinstance(options, dict), "Options must be a dictionary" - # Fallback to subset when name is None - joined_subset_names = " | ".join( - context["subset"]["name"] - for context in subset_contexts + # Fallback to product when name is None + joined_product_names = " | ".join( + context["product"]["name"] + for context in product_contexts ) if name is None: - name = joined_subset_names + name = joined_product_names log.info( - "Running '{}' on '{}'".format(Loader.__name__, joined_subset_names) + "Running '{}' on '{}'".format( + Loader.__name__, joined_product_names + ) ) - return Loader().load(subset_contexts, name, namespace, options) + return Loader().load(product_contexts, name, namespace, options) def load_container( @@ -377,10 +380,10 @@ def load_container( Args: Loader (Loader): The loader class to trigger. - representation (str or ObjectId or dict): The representation id + representation (str or dict): The representation id or full representation as returned by the database. namespace (str, Optional): The namespace to assign. Defaults to None. - name (str, Optional): The name to assign. Defaults to subset name. + name (str, Optional): The name to assign. Defaults to product name. options (dict, Optional): Additional options to pass on to the loader. Returns: @@ -391,8 +394,11 @@ def load_container( the representation. """ + from ayon_core.pipeline import get_current_project_name - context = get_representation_context(representation) + context = get_representation_context( + get_current_project_name(), representation + ) return load_with_repre_context( Loader, context, @@ -458,39 +464,54 @@ def update_container(container, version=-1): # Compute the different version from 'representation' project_name = get_current_project_name() - current_representation = get_representation_by_id( + current_representation = ayon_api.get_representation_by_id( project_name, container["representation"] ) assert current_representation is not None, "This is a bug" - current_version = get_version_by_id( - project_name, current_representation["parent"], fields=["parent"] + current_version_id = current_representation["versionId"] + current_version = ayon_api.get_version_by_id( + project_name, current_version_id, fields={"productId"} ) - if version == -1: - new_version = get_last_version_by_subset_id( - project_name, current_version["parent"], fields=["_id"] + if isinstance(version, HeroVersionType): + new_version = ayon_api.get_hero_version_by_product_id( + project_name, current_version["productId"] ) - - elif isinstance(version, HeroVersionType): - new_version = get_hero_version_by_subset_id( - project_name, current_version["parent"], fields=["_id"] + elif version == -1: + new_version = ayon_api.get_last_version_by_product_id( + project_name, current_version["productId"] ) else: - new_version = get_version_by_name( - project_name, version, current_version["parent"], fields=["_id"] + new_version = ayon_api.get_version_by_name( + project_name, version, current_version["productId"] ) - assert new_version is not None, "This is a bug" + if new_version is None: + raise ValueError("Failed to find matching version") - new_representation = get_representation_by_name( - project_name, current_representation["name"], new_version["_id"] + product_entity = ayon_api.get_product_by_id( + project_name, current_version["productId"] ) - assert 
new_representation is not None, "Representation wasn't found" + folder_entity = ayon_api.get_folder_by_id( + project_name, product_entity["folderId"] + ) + + repre_name = current_representation["name"] + new_representation = ayon_api.get_representation_by_name( + project_name, repre_name, new_version["id"] + ) + if new_representation is None: + raise ValueError( + "Representation '{}' wasn't found on requested version".format( + repre_name + ) + ) path = get_representation_path(new_representation) - assert os.path.exists(path), "Path {} doesn't exist".format(path) + if not path or not os.path.exists(path): + raise ValueError("Path {} doesn't exist".format(path)) # Run update on the Loader for this container Loader = _get_container_loader(container) @@ -499,8 +520,16 @@ def update_container(container, version=-1): "Can't update container because loader '{}' was not found." .format(container.get("loader")) ) + project_entity = ayon_api.get_project(project_name) + context = { + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": new_version, + "representation": new_representation, + } - return Loader().update(container, new_representation) + return Loader().update(container, context) def switch_container(container, representation, loader_plugin=None): @@ -508,7 +537,7 @@ def switch_container(container, representation, loader_plugin=None): Args: container (dict): container information - representation (dict): representation data from document + representation (dict): representation entity Returns: function call @@ -535,21 +564,20 @@ def switch_container(container, representation, loader_plugin=None): # Get the new representation to switch to project_name = get_current_project_name() - new_representation = get_representation_by_id( - project_name, representation["_id"] - ) - new_context = get_representation_context(new_representation) - if not is_compatible_loader(loader_plugin, new_context): + context = get_representation_context( + project_name, representation["id"] + ) + if not is_compatible_loader(loader_plugin, context): raise IncompatibleLoaderError( "Loader {} is incompatible with {}".format( - loader_plugin.__name__, new_context["subset"]["name"] + loader_plugin.__name__, context["product"]["name"] ) ) - loader = loader_plugin(new_context) + loader = loader_plugin(context) - return loader.switch(container, new_representation) + return loader.switch(container, context) def get_representation_path_from_context(context): @@ -557,16 +585,19 @@ def get_representation_path_from_context(context): from ayon_core.pipeline import get_current_project_name representation = context["representation"] - project_doc = context.get("project") + project_entity = context.get("project") root = None - if project_doc and project_doc["name"] != get_current_project_name(): - anatomy = Anatomy(project_doc["name"]) + if ( + project_entity + and project_entity["name"] != get_current_project_name() + ): + anatomy = Anatomy(project_entity["name"]) root = anatomy.roots return get_representation_path(representation, root) -def get_representation_path_with_anatomy(repre_doc, anatomy): +def get_representation_path_with_anatomy(repre_entity, anatomy): """Receive representation path using representation document and anatomy. Anatomy is used to replace 'root' key in representation file. Ideally @@ -578,7 +609,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): imagine the result should also contain paths to possible resources. 
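+
+    Example:
+        A minimal sketch with hypothetical values ("my_project" and the
+        representation id are placeholders; the 'ayon_api' query is the
+        same one used elsewhere in this module):
+
+        >>> import ayon_api
+        >>> repre_entity = ayon_api.get_representation_by_id(
+        ...     "my_project", "<representation-id>")
+        >>> anatomy = Anatomy("my_project")
+        >>> path = get_representation_path_with_anatomy(repre_entity, anatomy)
+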
Args: - repre_doc (Dict[str, Any]): Representation document. + repre_entity (Dict[str, Any]): Representation entity. anatomy (Anatomy): Project anatomy object. Returns: @@ -590,7 +621,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"] + template = repre_entity["attrib"]["template"] except KeyError: raise InvalidRepresentationContext(( @@ -599,7 +630,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): )) try: - context = repre_doc["context"] + context = repre_entity["context"] context["root"] = anatomy.roots path = StringTemplate.format_strict_template(template, context) @@ -637,7 +668,7 @@ def get_representation_path(representation, root=None): def path_from_representation(): try: - template = representation["data"]["template"] + template = representation["attrib"]["template"] except KeyError: return None @@ -664,10 +695,10 @@ def get_representation_path(representation, root=None): return path def path_from_data(): - if "path" not in representation["data"]: + if "path" not in representation["attrib"]: return None - path = representation["data"]["path"] + path = representation["attrib"]["path"] # Force replacing backslashes with forward slashed if not on # windows if platform.system().lower() != "windows": @@ -704,7 +735,7 @@ def get_representation_path(representation, root=None): def is_compatible_loader(Loader, context): """Return whether a loader is compatible with a context. - This checks the version's families and the representation for the given + This checks the product type and the representation for the given Loader. Returns: @@ -744,8 +775,12 @@ def filter_repre_contexts_by_loader(repre_contexts, loader): def loaders_from_representation(loaders, representation): """Return all compatible loaders for a representation.""" + from ayon_core.pipeline import get_current_project_name - context = get_representation_context(representation) + project_name = get_current_project_name() + context = get_representation_context( + project_name, representation + ) return loaders_from_repre_context(loaders, context) @@ -824,55 +859,57 @@ def filter_containers(containers, project_name): invalid_containers.extend(containers) return output - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_ids=repre_ids, - fields=["_id", "parent"] + fields={"id", "versionId"} ) # Store representations by stringified representation id - repre_docs_by_str_id = {} - repre_docs_by_version_id = collections.defaultdict(list) - for repre_doc in repre_docs: - repre_id = str(repre_doc["_id"]) - version_id = repre_doc["parent"] - repre_docs_by_str_id[repre_id] = repre_doc - repre_docs_by_version_id[version_id].append(repre_doc) + repre_entities_by_id = {} + repre_entities_by_version_id = collections.defaultdict(list) + for repre_entity in repre_entities: + repre_id = repre_entity["id"] + version_id = repre_entity["versionId"] + repre_entities_by_id[repre_id] = repre_entity + repre_entities_by_version_id[version_id].append(repre_entity) - # Query version docs to get it's subset ids + # Query version docs to get it's product ids # - also query hero version to be able identify if representation # belongs to existing version - version_docs = get_versions( + version_entities = ayon_api.get_versions( project_name, - version_ids=repre_docs_by_version_id.keys(), + version_ids=repre_entities_by_version_id.keys(), hero=True, - fields=["_id", "parent", "type"] + fields={"id", "productId", 
"version"} ) verisons_by_id = {} - versions_by_subset_id = collections.defaultdict(list) + versions_by_product_id = collections.defaultdict(list) hero_version_ids = set() - for version_doc in version_docs: - version_id = version_doc["_id"] + for version_entity in version_entities: + version_id = version_entity["id"] # Store versions by their ids - verisons_by_id[version_id] = version_doc - # There's no need to query subsets for hero versions + verisons_by_id[version_id] = version_entity + # There's no need to query products for hero versions # - they are considered as latest? - if version_doc["type"] == "hero_version": + if version_entity["version"] < 0: hero_version_ids.add(version_id) continue - subset_id = version_doc["parent"] - versions_by_subset_id[subset_id].append(version_doc) + product_id = version_entity["productId"] + versions_by_product_id[product_id].append(version_entity) - last_versions = get_last_versions( + last_versions = ayon_api.get_last_versions( project_name, - subset_ids=versions_by_subset_id.keys(), - fields=["_id"] + versions_by_product_id.keys(), + fields={"id"} ) # Figure out which versions are outdated outdated_version_ids = set() - for subset_id, last_version_doc in last_versions.items(): - for version_doc in versions_by_subset_id[subset_id]: - version_id = version_doc["_id"] - if version_id != last_version_doc["_id"]: + for product_id, last_version_entity in last_versions.items(): + for version_entity in versions_by_product_id[product_id]: + version_id = version_entity["id"] + if version_id in hero_version_ids: + continue + if version_id != last_version_entity["id"]: outdated_version_ids.add(version_id) # Based on all collected data figure out which containers are outdated @@ -884,8 +921,8 @@ def filter_containers(containers, project_name): invalid_containers.append(container) continue - repre_doc = repre_docs_by_str_id.get(repre_id) - if not repre_doc: + repre_entity = repre_entities_by_id.get(repre_id) + if not repre_entity: log.debug(( "Container '{}' has an invalid representation." " It is missing in the database." @@ -893,7 +930,7 @@ def filter_containers(containers, project_name): not_found_containers.append(container) continue - version_id = repre_doc["parent"] + version_id = repre_entity["versionId"] if version_id in outdated_version_ids: outdated_containers.append(container) diff --git a/client/ayon_core/pipeline/publish/README.md b/client/ayon_core/pipeline/publish/README.md index 2a0f45d093..ee2124dfd3 100644 --- a/client/ayon_core/pipeline/publish/README.md +++ b/client/ayon_core/pipeline/publish/README.md @@ -1,8 +1,8 @@ # Publish -OpenPype is using `pyblish` for publishing process which is a little bit extented and modified mainly for UI purposes. OpenPype's (new) publish UI does not allow to enable/disable instances or plugins that can be done during creation part. Also does support actions only for validators after validation exception. +AYON is using `pyblish` for publishing process which is a little bit extented and modified mainly for UI purposes. OpenPype's (new) publish UI does not allow to enable/disable instances or plugins that can be done during creation part. Also does support actions only for validators after validation exception. ## Exceptions -OpenPype define few specific exceptions that should be used in publish plugins. +AYON define few specific exceptions that should be used in publish plugins. 
 ### Validation exception
 Validation plugins should raise `PublishValidationError` to show the artist what's wrong and to offer actions that can fix it. The exception signals that the error happened in the plugin and can be fixed by the artist (with or without a plugin action). Any other error stops publishing immediately. A `PublishValidationError` raised after the validation order has the same effect as any other exception.
@@ -35,4 +35,4 @@ class MyExtendedPlugin(
 ### Extensions
 Currently the only extension is the ability to define attributes for instances during creation. The method `get_attribute_defs` returns attribute definitions for the families defined in the plugin's `families` attribute if it is an instance plugin, or for the whole context if it is a context plugin. `convert_attribute_values` can be implemented to convert existing values (or to remove legacy values). Values of publish attributes on a created instance are never removed automatically, so implementing this method is the best way to remove legacy data or convert it to a new data structure.
-Possible attribute definitions can be found in `openpype/pipeline/lib/attribute_definitions.py`.
+Possible attribute definitions can be found in `ayon_core/lib/attribute_definitions.py`.
diff --git a/client/ayon_core/pipeline/publish/abstract_collect_render.py b/client/ayon_core/pipeline/publish/abstract_collect_render.py
index 745632ca0a..c50dc16380 100644
--- a/client/ayon_core/pipeline/publish/abstract_collect_render.py
+++ b/client/ayon_core/pipeline/publish/abstract_collect_render.py
@@ -81,6 +81,9 @@ class RenderInstance(object):
     outputDir = attr.ib(default=None)
     context = attr.ib(default=None)
 
+    # The source instance that this render instance's data should merge into
+    source_instance = attr.ib(default=None, type=pyblish.api.Instance)
+
     @frameStart.validator
     def check_frame_start(self, _, value):
         """Validate if frame start is not larger than end."""
@@ -214,8 +217,11 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
             data = self.add_additional_data(data)
             render_instance_dict = attr.asdict(render_instance)
 
-            instance = context.create_instance(render_instance.name)
-            instance.data["label"] = render_instance.label
+            # Merge into source instance if provided, otherwise create instance
+            instance = render_instance_dict.pop("source_instance", None)
+            if instance is None:
+                instance = context.create_instance(render_instance.name)
+
             instance.data.update(render_instance_dict)
             instance.data.update(data)
diff --git a/client/ayon_core/pipeline/publish/constants.py b/client/ayon_core/pipeline/publish/constants.py
index 92e3fb089f..38f5ffef3f 100644
--- a/client/ayon_core/pipeline/publish/constants.py
+++ b/client/ayon_core/pipeline/publish/constants.py
@@ -6,6 +6,6 @@ ValidateContentsOrder = pyblish.api.ValidatorOrder + 0.1
 ValidateSceneOrder = pyblish.api.ValidatorOrder + 0.2
 ValidateMeshOrder = pyblish.api.ValidatorOrder + 0.3
 
-DEFAULT_PUBLISH_TEMPLATE = "publish"
-DEFAULT_HERO_PUBLISH_TEMPLATE = "hero"
-TRANSIENT_DIR_TEMPLATE = "transient"
+DEFAULT_PUBLISH_TEMPLATE = "default"
+DEFAULT_HERO_PUBLISH_TEMPLATE = "default"
+TRANSIENT_DIR_TEMPLATE = "default"
diff --git a/client/ayon_core/pipeline/publish/lib.py b/client/ayon_core/pipeline/publish/lib.py
index b4ed69b5d7..8d3644637b 100644
--- a/client/ayon_core/pipeline/publish/lib.py
+++ b/client/ayon_core/pipeline/publish/lib.py
@@ -742,29 +742,18 @@ def get_custom_staging_dir_info(
     anatomy = Anatomy(project_name)
     template_name = profile["template_name"] or TRANSIENT_DIR_TEMPLATE
-    
_validate_transient_template(project_name, template_name, anatomy) - custom_staging_dir = anatomy.templates[template_name]["folder"] + custom_staging_dir = anatomy.get_template_item( + "staging", template_name, "directory", default=None + ) + if custom_staging_dir is None: + raise ValueError(( + "Anatomy of project \"{}\" does not have set" + " \"{}\" template key!" + ).format(project_name, template_name)) is_persistent = profile["custom_staging_dir_persistent"] - return custom_staging_dir, is_persistent - - -def _validate_transient_template(project_name, template_name, anatomy): - """Check that transient template is correctly configured. - - Raises: - ValueError - if misconfigured template - """ - if template_name not in anatomy.templates: - raise ValueError(("Anatomy of project \"{}\" does not have set" - " \"{}\" template key!" - ).format(project_name, template_name)) - - if "folder" not in anatomy.templates[template_name]: - raise ValueError(("There is not set \"folder\" template in \"{}\" anatomy" # noqa - " for project \"{}\"." - ).format(template_name, project_name)) + return custom_staging_dir.template, is_persistent def get_published_workfile_instance(context): @@ -815,9 +804,9 @@ def replace_with_published_scene_path(instance, replace_in_path=True): template_data["ext"] = rep.get("ext") template_data["comment"] = None - anatomy = instance.context.data['anatomy'] - anatomy_filled = anatomy.format(template_data) - template_filled = anatomy_filled["publish"]["path"] + anatomy = instance.context.data["anatomy"] + template = anatomy.get_template_item("publish", "default", "path") + template_filled = template.format_strict(template_data) file_path = os.path.normpath(template_filled) log.info("Using published scene for render {}".format(file_path)) diff --git a/client/ayon_core/pipeline/publish/publish_plugins.py b/client/ayon_core/pipeline/publish/publish_plugins.py index 2386558091..6b1984d92b 100644 --- a/client/ayon_core/pipeline/publish/publish_plugins.py +++ b/client/ayon_core/pipeline/publish/publish_plugins.py @@ -2,7 +2,6 @@ import inspect from abc import ABCMeta import pyblish.api from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin -from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS from ayon_core.lib import BoolDef from .lib import ( diff --git a/client/ayon_core/pipeline/schema/__init__.py b/client/ayon_core/pipeline/schema/__init__.py index d7b33f2621..db98a6d080 100644 --- a/client/ayon_core/pipeline/schema/__init__.py +++ b/client/ayon_core/pipeline/schema/__init__.py @@ -13,7 +13,6 @@ Resources: """ import os -import re import json import logging @@ -29,34 +28,6 @@ CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) _CACHED = False -def get_schema_version(schema_name): - """Extract version form schema name. - - It is expected that schema name contain only major and minor version. - - Expected name should match to: - "{name}:{type}-{major version}.{minor version}" - - `name` - must not contain colon - - `type` - must not contain dash - - major and minor versions must be numbers separated by dot - - Args: - schema_name(str): Name of schema that should be parsed. - - Returns: - tuple: Contain two values major version as first and minor version as - second. When schema does not match parsing regex then `(0, 0)` is - returned. 
- """ - schema_regex = re.compile(r"[^:]+:[^-]+-(\d.\d)") - groups = schema_regex.findall(schema_name) - if not groups: - return 0, 0 - - maj_version, min_version = groups[0].split(".") - return int(maj_version), int(min_version) - - def validate(data, schema=None): """Validate `data` with `schema` @@ -72,11 +43,6 @@ def validate(data, schema=None): _precache() root, schema = data["schema"].rsplit(":", 1) - # assert root in ( - # "mindbender-core", # Backwards compatiblity - # "avalon-core", - # "pype" - # ) if isinstance(schema, six.string_types): schema = _cache[schema + ".json"] diff --git a/client/ayon_core/pipeline/schema/application-1.0.json b/client/ayon_core/pipeline/schema/application-1.0.json deleted file mode 100644 index 953abee569..0000000000 --- a/client/ayon_core/pipeline/schema/application-1.0.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:application-1.0", - "description": "An application definition.", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "label", - "application_dir", - "executable" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "label": { - "description": "Nice name of application.", - "type": "string" - }, - "application_dir": { - "description": "Name of directory used for application resources.", - "type": "string" - }, - "executable": { - "description": "Name of callable executable, this is called to launch the application", - "type": "string" - }, - "description": { - "description": "Description of application.", - "type": "string" - }, - "environment": { - "description": "Key/value pairs for environment variables related to this application. 
Supports lists for paths, such as PYTHONPATH.", - "type": "object", - "items": { - "oneOf": [ - {"type": "string"}, - {"type": "array", "items": {"type": "string"}} - ] - } - }, - "default_dirs": { - "type": "array", - "items": { - "type": "string" - } - }, - "copy": { - "type": "object", - "patternProperties": { - "^.*$": { - "anyOf": [ - {"type": "string"}, - {"type": "null"} - ] - } - }, - "additionalProperties": false - } - } -} diff --git a/client/ayon_core/pipeline/schema/asset-1.0.json b/client/ayon_core/pipeline/schema/asset-1.0.json deleted file mode 100644 index ab104c002a..0000000000 --- a/client/ayon_core/pipeline/schema/asset-1.0.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:asset-1.0", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "name", - "subsets" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "name": { - "description": "Name of directory", - "type": "string" - }, - "subsets": { - "type": "array", - "items": { - "$ref": "subset.json" - } - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/asset-2.0.json b/client/ayon_core/pipeline/schema/asset-2.0.json deleted file mode 100644 index b894d79792..0000000000 --- a/client/ayon_core/pipeline/schema/asset-2.0.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:asset-2.0", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "name", - "silo", - "data" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:asset-2.0"], - "example": "openpype:asset-2.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["asset"], - "example": "asset" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of asset", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "Bruce" - }, - "silo": { - "description": "Group or container of asset", - "type": "string", - "example": "assets" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": {"key": "value"} - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/asset-3.0.json b/client/ayon_core/pipeline/schema/asset-3.0.json deleted file mode 100644 index 948704d2a1..0000000000 --- a/client/ayon_core/pipeline/schema/asset-3.0.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:asset-3.0", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:asset-3.0"], - "example": "openpype:asset-3.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["asset"], - "example": "asset" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of asset", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "Bruce" - }, - "silo": 
{ - "description": "Group or container of asset", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "assets" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": {"key": "value"} - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/config-1.0.json b/client/ayon_core/pipeline/schema/config-1.0.json deleted file mode 100644 index 49398a57cd..0000000000 --- a/client/ayon_core/pipeline/schema/config-1.0.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:config-1.0", - "description": "A project configuration.", - - "type": "object", - - "additionalProperties": false, - "required": [ - "tasks", - "apps" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "template": { - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^.*$": { - "type": "string" - } - } - }, - "tasks": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": ["name"] - } - }, - "apps": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": ["name"] - } - }, - "families": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "label": {"type": "string"}, - "hideFilter": {"type": "boolean"} - }, - "required": ["name"] - } - }, - "groups": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "color": {"type": "string"}, - "order": {"type": ["integer", "number"]} - }, - "required": ["name"] - } - }, - "copy": { - "type": "object" - } - } -} diff --git a/client/ayon_core/pipeline/schema/config-1.1.json b/client/ayon_core/pipeline/schema/config-1.1.json deleted file mode 100644 index 6e15514aaf..0000000000 --- a/client/ayon_core/pipeline/schema/config-1.1.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:config-1.1", - "description": "A project configuration.", - - "type": "object", - - "additionalProperties": false, - "required": [ - "tasks", - "apps" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "template": { - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^.*$": { - "type": "string" - } - } - }, - "tasks": { - "type": "object", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": [ - "short_name" - ] - } - }, - "apps": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": ["name"] - } - }, - "families": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "label": {"type": "string"}, - "hideFilter": {"type": "boolean"} - }, - "required": ["name"] - } - }, - "groups": { - "type": "array", - "items": 
{ - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "color": {"type": "string"}, - "order": {"type": ["integer", "number"]} - }, - "required": ["name"] - } - }, - "copy": { - "type": "object" - } - } -} diff --git a/client/ayon_core/pipeline/schema/config-2.0.json b/client/ayon_core/pipeline/schema/config-2.0.json deleted file mode 100644 index 54b226711a..0000000000 --- a/client/ayon_core/pipeline/schema/config-2.0.json +++ /dev/null @@ -1,87 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:config-2.0", - "description": "A project configuration.", - - "type": "object", - - "additionalProperties": false, - "required": [ - "tasks", - "apps" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "templates": { - "type": "object" - }, - "roots": { - "type": "object" - }, - "imageio": { - "type": "object" - }, - "tasks": { - "type": "object", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": [ - "short_name" - ] - } - }, - "apps": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "group": {"type": "string"}, - "label": {"type": "string"} - }, - "required": ["name"] - } - }, - "families": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "label": {"type": "string"}, - "hideFilter": {"type": "boolean"} - }, - "required": ["name"] - } - }, - "groups": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": {"type": "string"}, - "icon": {"type": "string"}, - "color": {"type": "string"}, - "order": {"type": ["integer", "number"]} - }, - "required": ["name"] - } - }, - "copy": { - "type": "object" - } - } -} diff --git a/client/ayon_core/pipeline/schema/hero_version-1.0.json b/client/ayon_core/pipeline/schema/hero_version-1.0.json deleted file mode 100644 index b720dc2887..0000000000 --- a/client/ayon_core/pipeline/schema/hero_version-1.0.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:hero_version-1.0", - "description": "Hero version of asset", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "version_id", - "schema", - "type", - "parent" - ], - - "properties": { - "_id": { - "description": "Document's id (database will create it's if not entered)", - "example": "ObjectId(592c33475f8c1b064c4d1696)" - }, - "version_id": { - "description": "The version ID from which it was created", - "example": "ObjectId(592c33475f8c1b064c4d1695)" - }, - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:hero_version-1.0"], - "example": "openpype:hero_version-1.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["hero_version"], - "example": "hero_version" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "ObjectId(592c33475f8c1b064c4d1697)" - } - } -} diff --git a/client/ayon_core/pipeline/schema/inventory-1.0.json b/client/ayon_core/pipeline/schema/inventory-1.0.json deleted file mode 100644 index 2fe78794ab..0000000000 --- a/client/ayon_core/pipeline/schema/inventory-1.0.json +++ /dev/null @@ -1,10 
+0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:config-1.0", - "description": "A project configuration.", - - "type": "object", - - "additionalProperties": true -} diff --git a/client/ayon_core/pipeline/schema/inventory-1.1.json b/client/ayon_core/pipeline/schema/inventory-1.1.json deleted file mode 100644 index b61a76b32a..0000000000 --- a/client/ayon_core/pipeline/schema/inventory-1.1.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:config-1.1", - "description": "A project configuration.", - - "type": "object", - - "additionalProperties": true -} diff --git a/client/ayon_core/pipeline/schema/project-2.0.json b/client/ayon_core/pipeline/schema/project-2.0.json deleted file mode 100644 index 0ed5a55599..0000000000 --- a/client/ayon_core/pipeline/schema/project-2.0.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:project-2.0", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "name", - "data", - "config" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:project-2.0"], - "example": "openpype:project-2.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["project"], - "example": "project" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of directory", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "hulk" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": { - "fps": 24, - "width": 1920, - "height": 1080 - } - }, - "config": { - "type": "object", - "description": "Document metadata", - "example": { - "schema": "openpype:config-1.0", - "apps": [ - { - "name": "maya2016", - "label": "Autodesk Maya 2016" - }, - { - "name": "nuke10", - "label": "The Foundry Nuke 10.0" - } - ], - "tasks": [ - {"name": "model"}, - {"name": "render"}, - {"name": "animate"}, - {"name": "rig"}, - {"name": "lookdev"}, - {"name": "layout"} - ], - "template": { - "work": - "{root}/{project}/{silo}/{asset}/work/{task}/{app}", - "publish": - "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}" - } - }, - "$ref": "config-1.0.json" - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/project-2.1.json b/client/ayon_core/pipeline/schema/project-2.1.json deleted file mode 100644 index 9413c9f691..0000000000 --- a/client/ayon_core/pipeline/schema/project-2.1.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:project-2.1", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "name", - "data", - "config" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:project-2.1"], - "example": "openpype:project-2.1" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["project"], - "example": "project" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of directory", - "type": 
"string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "hulk" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": { - "fps": 24, - "width": 1920, - "height": 1080 - } - }, - "config": { - "type": "object", - "description": "Document metadata", - "example": { - "schema": "openpype:config-1.1", - "apps": [ - { - "name": "maya2016", - "label": "Autodesk Maya 2016" - }, - { - "name": "nuke10", - "label": "The Foundry Nuke 10.0" - } - ], - "tasks": { - "Model": {"short_name": "mdl"}, - "Render": {"short_name": "rnd"}, - "Animate": {"short_name": "anim"}, - "Rig": {"short_name": "rig"}, - "Lookdev": {"short_name": "look"}, - "Layout": {"short_name": "lay"} - }, - "template": { - "work": - "{root}/{project}/{silo}/{asset}/work/{task}/{app}", - "publish": - "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}" - } - }, - "$ref": "config-1.1.json" - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/project-3.0.json b/client/ayon_core/pipeline/schema/project-3.0.json deleted file mode 100644 index be23e10c93..0000000000 --- a/client/ayon_core/pipeline/schema/project-3.0.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:project-3.0", - "description": "A unit of data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "name", - "data", - "config" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:project-3.0"], - "example": "openpype:project-3.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["project"], - "example": "project" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of directory", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "hulk" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": { - "fps": 24, - "width": 1920, - "height": 1080 - } - }, - "config": { - "type": "object", - "description": "Document metadata", - "$ref": "config-2.0.json" - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/representation-1.0.json b/client/ayon_core/pipeline/schema/representation-1.0.json deleted file mode 100644 index 347c585f52..0000000000 --- a/client/ayon_core/pipeline/schema/representation-1.0.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:representation-1.0", - "description": "The inverse of an instance", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "format", - "path" - ], - - "properties": { - "schema": {"type": "string"}, - "format": { - "description": "File extension, including '.'", - "type": "string" - }, - "path": { - "description": "Unformatted path to version.", - "type": "string" - } - } -} diff --git a/client/ayon_core/pipeline/schema/representation-2.0.json b/client/ayon_core/pipeline/schema/representation-2.0.json deleted file mode 100644 index f47c16a10a..0000000000 --- a/client/ayon_core/pipeline/schema/representation-2.0.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:representation-2.0", - "description": "The inverse of an instance", - - "type": "object", - - "additionalProperties": 
true, - - "required": [ - "schema", - "type", - "parent", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:representation-2.0"], - "example": "openpype:representation-2.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["representation"], - "example": "representation" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of representation", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "abc" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": { - "label": "Alembic" - } - }, - "dependencies": { - "description": "Other representation that this representation depends on", - "type": "array", - "items": {"type": "string"}, - "example": [ - "592d547a5f8c1b388093c145" - ] - }, - "context": { - "description": "Summary of the context to which this representation belong.", - "type": "object", - "properties": { - "project": {"type": "object"}, - "asset": {"type": "string"}, - "silo": {"type": ["string", "null"]}, - "subset": {"type": "string"}, - "version": {"type": "number"}, - "representation": {"type": "string"} - }, - "example": { - "project": "hulk", - "asset": "Bruce", - "silo": "assets", - "subset": "rigDefault", - "version": 12, - "representation": "ma" - } - } - } -} diff --git a/client/ayon_core/pipeline/schema/session-1.0.json b/client/ayon_core/pipeline/schema/session-1.0.json deleted file mode 100644 index 5ced0a6f08..0000000000 --- a/client/ayon_core/pipeline/schema/session-1.0.json +++ /dev/null @@ -1,143 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:session-1.0", - "description": "The Avalon environment", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "AVALON_PROJECTS", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_SILO", - "AVALON_CONFIG" - ], - - "properties": { - "AVALON_PROJECTS": { - "description": "Absolute path to root of project directories", - "type": "string", - "example": "/nas/projects" - }, - "AVALON_PROJECT": { - "description": "Name of project", - "type": "string", - "pattern": "^\\w*$", - "example": "Hulk" - }, - "AVALON_ASSET": { - "description": "Name of asset", - "type": "string", - "pattern": "^\\w*$", - "example": "Bruce" - }, - "AVALON_SILO": { - "description": "Name of asset group or container", - "type": "string", - "pattern": "^\\w*$", - "example": "assets" - }, - "AVALON_TASK": { - "description": "Name of task", - "type": "string", - "pattern": "^\\w*$", - "example": "modeling" - }, - "AVALON_CONFIG": { - "description": "Name of Avalon configuration", - "type": "string", - "pattern": "^\\w*$", - "example": "polly" - }, - "AVALON_APP": { - "description": "Name of application", - "type": "string", - "pattern": "^\\w*$", - "example": "maya2016" - }, - "AVALON_MONGO": { - "description": "Address to the asset database", - "type": "string", - "pattern": "^mongodb://[\\w/@:.]*$", - "example": "mongodb://localhost:27017", - "default": "mongodb://localhost:27017" - }, - "AVALON_DB": { - "description": "Name of database", - "type": "string", - "pattern": "^\\w*$", - "example": "avalon", - "default": "avalon" - }, - "AVALON_LABEL": { - "description": "Nice name of Avalon, used in e.g. 
graphical user interfaces", - "type": "string", - "example": "Mindbender", - "default": "Avalon" - }, - "AVALON_SENTRY": { - "description": "Address to Sentry", - "type": "string", - "pattern": "^http[\\w/@:.]*$", - "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2", - "default": null - }, - "AVALON_DEADLINE": { - "description": "Address to Deadline", - "type": "string", - "pattern": "^http[\\w/@:.]*$", - "example": "http://192.168.99.101", - "default": null - }, - "AVALON_TIMEOUT": { - "description": "Wherever there is a need for a timeout, this is the default value.", - "type": "string", - "pattern": "^[0-9]*$", - "default": "1000", - "example": "1000" - }, - "AVALON_UPLOAD": { - "description": "Boolean of whether to upload published material to central asset repository", - "type": "string", - "default": null, - "example": "True" - }, - "AVALON_USERNAME": { - "description": "Generic username", - "type": "string", - "pattern": "^\\w*$", - "default": "avalon", - "example": "myself" - }, - "AVALON_PASSWORD": { - "description": "Generic password", - "type": "string", - "pattern": "^\\w*$", - "default": "secret", - "example": "abc123" - }, - "AVALON_INSTANCE_ID": { - "description": "Unique identifier for instances in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.instance", - "example": "avalon.instance" - }, - "AVALON_CONTAINER_ID": { - "description": "Unique identifier for a loaded representation in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.container", - "example": "avalon.container" - }, - "AVALON_DEBUG": { - "description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.", - "type": "string", - "default": null, - "example": "True" - } - } -} diff --git a/client/ayon_core/pipeline/schema/session-2.0.json b/client/ayon_core/pipeline/schema/session-2.0.json deleted file mode 100644 index 0a4d51beb2..0000000000 --- a/client/ayon_core/pipeline/schema/session-2.0.json +++ /dev/null @@ -1,134 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:session-2.0", - "description": "The Avalon environment", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_CONFIG" - ], - - "properties": { - "AVALON_PROJECTS": { - "description": "Absolute path to root of project directories", - "type": "string", - "example": "/nas/projects" - }, - "AVALON_PROJECT": { - "description": "Name of project", - "type": "string", - "pattern": "^\\w*$", - "example": "Hulk" - }, - "AVALON_ASSET": { - "description": "Name of asset", - "type": "string", - "pattern": "^\\w*$", - "example": "Bruce" - }, - "AVALON_SILO": { - "description": "Name of asset group or container", - "type": "string", - "pattern": "^\\w*$", - "example": "assets" - }, - "AVALON_TASK": { - "description": "Name of task", - "type": "string", - "pattern": "^\\w*$", - "example": "modeling" - }, - "AVALON_CONFIG": { - "description": "Name of Avalon configuration", - "type": "string", - "pattern": "^\\w*$", - "example": "polly" - }, - "AVALON_APP": { - "description": "Name of application", - "type": "string", - "pattern": "^\\w*$", - "example": "maya2016" - }, - "AVALON_DB": { - "description": "Name of database", - "type": "string", - "pattern": "^\\w*$", - "example": "avalon", - "default": "avalon" - }, - "AVALON_LABEL": { - "description": "Nice name of Avalon, used in 
e.g. graphical user interfaces", - "type": "string", - "example": "Mindbender", - "default": "Avalon" - }, - "AVALON_SENTRY": { - "description": "Address to Sentry", - "type": "string", - "pattern": "^http[\\w/@:.]*$", - "example": "https://5b872b280de742919b115bdc8da076a5:8d278266fe764361b8fa6024af004a9c@logs.mindbender.com/2", - "default": null - }, - "AVALON_DEADLINE": { - "description": "Address to Deadline", - "type": "string", - "pattern": "^http[\\w/@:.]*$", - "example": "http://192.168.99.101", - "default": null - }, - "AVALON_TIMEOUT": { - "description": "Wherever there is a need for a timeout, this is the default value.", - "type": "string", - "pattern": "^[0-9]*$", - "default": "1000", - "example": "1000" - }, - "AVALON_UPLOAD": { - "description": "Boolean of whether to upload published material to central asset repository", - "type": "string", - "default": null, - "example": "True" - }, - "AVALON_USERNAME": { - "description": "Generic username", - "type": "string", - "pattern": "^\\w*$", - "default": "avalon", - "example": "myself" - }, - "AVALON_PASSWORD": { - "description": "Generic password", - "type": "string", - "pattern": "^\\w*$", - "default": "secret", - "example": "abc123" - }, - "AVALON_INSTANCE_ID": { - "description": "Unique identifier for instances in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.instance", - "example": "avalon.instance" - }, - "AVALON_CONTAINER_ID": { - "description": "Unique identifier for a loaded representation in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.container", - "example": "avalon.container" - }, - "AVALON_DEBUG": { - "description": "Enable debugging mode. Some applications may use this for e.g. extended verbosity or mock plug-ins.", - "type": "string", - "default": null, - "example": "True" - } - } -} diff --git a/client/ayon_core/pipeline/schema/session-3.0.json b/client/ayon_core/pipeline/schema/session-3.0.json deleted file mode 100644 index 9f785939e4..0000000000 --- a/client/ayon_core/pipeline/schema/session-3.0.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:session-3.0", - "description": "The Avalon environment", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "AVALON_PROJECT", - "AVALON_ASSET" - ], - - "properties": { - "AVALON_PROJECTS": { - "description": "Absolute path to root of project directories", - "type": "string", - "example": "/nas/projects" - }, - "AVALON_PROJECT": { - "description": "Name of project", - "type": "string", - "pattern": "^\\w*$", - "example": "Hulk" - }, - "AVALON_ASSET": { - "description": "Name of asset", - "type": "string", - "pattern": "^\\w*$", - "example": "Bruce" - }, - "AVALON_TASK": { - "description": "Name of task", - "type": "string", - "pattern": "^\\w*$", - "example": "modeling" - }, - "AVALON_APP": { - "description": "Name of host", - "type": "string", - "pattern": "^\\w*$", - "example": "maya2016" - }, - "AVALON_DB": { - "description": "Name of database", - "type": "string", - "pattern": "^\\w*$", - "example": "avalon", - "default": "avalon" - }, - "AVALON_LABEL": { - "description": "Nice name of Avalon, used in e.g. 
graphical user interfaces", - "type": "string", - "example": "Mindbender", - "default": "Avalon" - }, - "AVALON_TIMEOUT": { - "description": "Wherever there is a need for a timeout, this is the default value.", - "type": "string", - "pattern": "^[0-9]*$", - "default": "1000", - "example": "1000" - }, - "AVALON_INSTANCE_ID": { - "description": "Unique identifier for instances in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.instance", - "example": "avalon.instance" - }, - "AVALON_CONTAINER_ID": { - "description": "Unique identifier for a loaded representation in a working file", - "type": "string", - "pattern": "^[\\w.]*$", - "default": "avalon.container", - "example": "avalon.container" - } - } -} diff --git a/client/ayon_core/pipeline/schema/session-4.0.json b/client/ayon_core/pipeline/schema/session-4.0.json deleted file mode 100644 index 0dab48aa46..0000000000 --- a/client/ayon_core/pipeline/schema/session-4.0.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:session-4.0", - "description": "The Avalon environment", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "AVALON_PROJECT" - ], - - "properties": { - "AVALON_PROJECT": { - "description": "Name of project", - "type": "string", - "pattern": "^\\w*$", - "example": "Hulk" - }, - "AVALON_ASSET": { - "description": "Name of asset", - "type": "string", - "pattern": "^[\\/\\w]*$", - "example": "Bruce" - }, - "AVALON_TASK": { - "description": "Name of task", - "type": "string", - "pattern": "^\\w*$", - "example": "modeling" - }, - "AVALON_APP": { - "description": "Name of host", - "type": "string", - "pattern": "^\\w*$", - "example": "maya" - }, - "AVALON_DB": { - "description": "Name of database", - "type": "string", - "pattern": "^\\w*$", - "example": "avalon", - "default": "avalon" - }, - "AVALON_LABEL": { - "description": "Nice name of Avalon, used in e.g. graphical user interfaces", - "type": "string", - "example": "MyLabel", - "default": "Avalon" - }, - "AVALON_TIMEOUT": { - "description": "Wherever there is a need for a timeout, this is the default value.", - "type": "string", - "pattern": "^[0-9]*$", - "default": "1000", - "example": "1000" - } - } -} diff --git a/client/ayon_core/pipeline/schema/shaders-1.0.json b/client/ayon_core/pipeline/schema/shaders-1.0.json deleted file mode 100644 index 7102ba1861..0000000000 --- a/client/ayon_core/pipeline/schema/shaders-1.0.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:shaders-1.0", - "description": "Relationships between shaders and Avalon IDs", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "shader" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "shader": { - "description": "Name of directory", - "type": "array", - "items": { - "type": "str", - "description": "Avalon ID and optional face indexes, e.g. 
'f9520572-ac1d-11e6-b39e-3085a99791c9.f[5002:5185]'" - } - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/subset-1.0.json b/client/ayon_core/pipeline/schema/subset-1.0.json deleted file mode 100644 index a299a6d341..0000000000 --- a/client/ayon_core/pipeline/schema/subset-1.0.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:subset-1.0", - "description": "A container of instances", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "name", - "versions" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string" - }, - "name": { - "description": "Name of directory", - "type": "string" - }, - "versions": { - "type": "array", - "items": { - "$ref": "version.json" - } - } - }, - - "definitions": {} -} diff --git a/client/ayon_core/pipeline/schema/subset-2.0.json b/client/ayon_core/pipeline/schema/subset-2.0.json deleted file mode 100644 index db256ec7fb..0000000000 --- a/client/ayon_core/pipeline/schema/subset-2.0.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:subset-2.0", - "description": "A container of instances", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "parent", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:subset-2.0"], - "example": "openpype:subset-2.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["subset"], - "example": "subset" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of directory", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "shot01" - }, - "data": { - "type": "object", - "description": "Document metadata", - "example": { - "frameStart": 1000, - "frameEnd": 1201 - } - } - } -} diff --git a/client/ayon_core/pipeline/schema/subset-3.0.json b/client/ayon_core/pipeline/schema/subset-3.0.json deleted file mode 100644 index 1a0db53c04..0000000000 --- a/client/ayon_core/pipeline/schema/subset-3.0.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:subset-3.0", - "description": "A container of instances", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "parent", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:subset-3.0"], - "example": "openpype:subset-3.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["subset"], - "example": "subset" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Name of directory", - "type": "string", - "pattern": "^[a-zA-Z0-9_.]*$", - "example": "shot01" - }, - "data": { - "description": "Document metadata", - "type": "object", - "required": ["families"], - "properties": { - "families": { - "type": "array", - "items": {"type": "string"}, - "description": "One or more families associated with this subset" - } - }, - "example": { - "families" : [ - "avalon.camera" - ], - "frameStart": 1000, - "frameEnd": 1201 - 
} - } - } -} diff --git a/client/ayon_core/pipeline/schema/thumbnail-1.0.json b/client/ayon_core/pipeline/schema/thumbnail-1.0.json deleted file mode 100644 index 5bdf78a4b1..0000000000 --- a/client/ayon_core/pipeline/schema/thumbnail-1.0.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:thumbnail-1.0", - "description": "Entity with thumbnail data", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "data" - ], - - "properties": { - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:thumbnail-1.0"], - "example": "openpype:thumbnail-1.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["thumbnail"], - "example": "thumbnail" - }, - "data": { - "description": "Thumbnail data", - "type": "object", - "example": { - "binary_data": "Binary({byte data of image})", - "template": "{thumbnail_root}/{project[name]}/{_id}{ext}}", - "template_data": { - "ext": ".jpg" - } - } - } - } -} diff --git a/client/ayon_core/pipeline/schema/version-1.0.json b/client/ayon_core/pipeline/schema/version-1.0.json deleted file mode 100644 index daa1997721..0000000000 --- a/client/ayon_core/pipeline/schema/version-1.0.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:version-1.0", - "description": "An individual version", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "version", - "path", - "time", - "author", - "source", - "representations" - ], - - "properties": { - "schema": {"type": "string"}, - "representations": { - "type": "array", - "items": { - "$ref": "representation.json" - } - }, - "time": { - "description": "ISO formatted, file-system compatible time", - "type": "string" - }, - "author": { - "description": "User logged on to the machine at time of publish", - "type": "string" - }, - "version": { - "description": "Number of this version", - "type": "number" - }, - "path": { - "description": "Unformatted path, e.g. 
'{root}/assets/Bruce/publish/lookdevDefault/v001", - "type": "string" - }, - "source": { - "description": "Original file from which this version was made.", - "type": "string" - } - } -} diff --git a/client/ayon_core/pipeline/schema/version-2.0.json b/client/ayon_core/pipeline/schema/version-2.0.json deleted file mode 100644 index 099e9be70a..0000000000 --- a/client/ayon_core/pipeline/schema/version-2.0.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:version-2.0", - "description": "An individual version", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "parent", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:version-2.0"], - "example": "openpype:version-2.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["version"], - "example": "version" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Number of version", - "type": "number", - "example": 12 - }, - "locations": { - "description": "Where on the planet this version can be found.", - "type": "array", - "items": {"type": "string"}, - "example": ["data.avalon.com"] - }, - "data": { - "description": "Document metadata", - "type": "object", - "required": ["families", "author", "source", "time"], - "properties": { - "time": { - "description": "ISO formatted, file-system compatible time", - "type": "string" - }, - "timeFormat": { - "description": "ISO format of time", - "type": "string" - }, - "author": { - "description": "User logged on to the machine at time of publish", - "type": "string" - }, - "version": { - "description": "Number of this version", - "type": "number" - }, - "path": { - "description": "Unformatted path, e.g. 
'{root}/assets/Bruce/publish/lookdevDefault/v001", - "type": "string" - }, - "source": { - "description": "Original file from which this version was made.", - "type": "string" - }, - "families": { - "type": "array", - "items": {"type": "string"}, - "description": "One or more families associated with this version" - } - }, - "example": { - "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma", - "author" : "marcus", - "families" : [ - "avalon.model" - ], - "time" : "20170510T090203Z" - } - } - } -} diff --git a/client/ayon_core/pipeline/schema/version-3.0.json b/client/ayon_core/pipeline/schema/version-3.0.json deleted file mode 100644 index 3e07fc4499..0000000000 --- a/client/ayon_core/pipeline/schema/version-3.0.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:version-3.0", - "description": "An individual version", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "parent", - "name", - "data" - ], - - "properties": { - "schema": { - "description": "The schema associated with this document", - "type": "string", - "enum": ["openpype:version-3.0"], - "example": "openpype:version-3.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["version"], - "example": "version" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "name": { - "description": "Number of version", - "type": "number", - "example": 12 - }, - "locations": { - "description": "Where on the planet this version can be found.", - "type": "array", - "items": {"type": "string"}, - "example": ["data.avalon.com"] - }, - "data": { - "description": "Document metadata", - "type": "object", - "required": ["author", "source", "time"], - "properties": { - "time": { - "description": "ISO formatted, file-system compatible time", - "type": "string" - }, - "timeFormat": { - "description": "ISO format of time", - "type": "string" - }, - "author": { - "description": "User logged on to the machine at time of publish", - "type": "string" - }, - "version": { - "description": "Number of this version", - "type": "number" - }, - "path": { - "description": "Unformatted path, e.g. 
'{root}/assets/Bruce/publish/lookdevDefault/v001", - "type": "string" - }, - "source": { - "description": "Original file from which this version was made.", - "type": "string" - } - }, - "example": { - "source" : "{root}/f02_prod/assets/BubbleWitch/work/modeling/marcus/maya/scenes/model_v001.ma", - "author" : "marcus", - "time" : "20170510T090203Z" - } - } - } -} diff --git a/client/ayon_core/pipeline/schema/workfile-1.0.json b/client/ayon_core/pipeline/schema/workfile-1.0.json deleted file mode 100644 index 5f9600ef20..0000000000 --- a/client/ayon_core/pipeline/schema/workfile-1.0.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - - "title": "openpype:workfile-1.0", - "description": "Workfile additional information.", - - "type": "object", - - "additionalProperties": true, - - "required": [ - "schema", - "type", - "filename", - "task_name", - "parent" - ], - - "properties": { - "schema": { - "description": "Schema identifier for payload", - "type": "string", - "enum": ["openpype:workfile-1.0"], - "example": "openpype:workfile-1.0" - }, - "type": { - "description": "The type of document", - "type": "string", - "enum": ["workfile"], - "example": "workfile" - }, - "parent": { - "description": "Unique identifier to parent document", - "example": "592c33475f8c1b064c4d1696" - }, - "filename": { - "description": "Workfile's filename", - "type": "string", - "example": "kuba_each_case_Alpaca_01_animation_v001.ma" - }, - "task_name": { - "description": "Task name", - "type": "string", - "example": "animation" - }, - "data": { - "description": "Document metadata", - "type": "object", - "example": {"key": "value"} - } - } -} diff --git a/client/ayon_core/pipeline/template_data.py b/client/ayon_core/pipeline/template_data.py index e9c57521d4..526c7d35c5 100644 --- a/client/ayon_core/pipeline/template_data.py +++ b/client/ayon_core/pipeline/template_data.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_project, get_asset_by_name +import ayon_api + from ayon_core.settings import get_studio_settings from ayon_core.lib.local_settings import get_ayon_username @@ -27,13 +28,12 @@ def get_general_template_data(settings=None): } -def get_project_template_data(project_doc=None, project_name=None): +def get_project_template_data(project_entity=None, project_name=None): """Extract data from project document that are used in templates. - Project document must have 'name' and (at this moment) optional - key 'data.code'. + Project document must have 'name' and 'code'. - One of 'project_name' or 'project_doc' must be passed. With prepared + One of 'project_name' or 'project_entity' must be passed. With prepared project document is function much faster because don't have to query. Output contains formatting keys: @@ -41,7 +41,7 @@ def get_project_template_data(project_doc=None, project_name=None): - 'project[code]' - Project code Args: - project_doc (Dict[str, Any]): Queried project document. + project_entity (Dict[str, Any]): Queried project entity. project_name (str): Name of project. 
Returns:
@@ -49,12 +49,12 @@
     """
 
     if not project_name:
-        project_name = project_doc["name"]
+        project_name = project_entity["name"]
 
-    if not project_doc:
-        project_doc = get_project(project_name, fields=["data.code"])
+    elif not project_entity:
+        project_entity = ayon_api.get_project(project_name, fields=["code"])
 
-    project_code = project_doc.get("data", {}).get("code")
+    project_code = project_entity["code"]
     return {
         "project": {
             "name": project_name,
@@ -63,86 +63,74 @@
     }
 
 
-def get_asset_template_data(asset_doc, project_name):
-    """Extract data from asset document that are used in templates.
+def get_folder_template_data(folder_entity, project_name):
+    """Extract data from folder entity that are used in templates.
 
     Output dictionary contains keys:
-    - 'asset' - asset name
-    - 'hierarchy' - parent asset names joined with '/'
-    - 'parent' - direct parent name, project name used if is under project
+    - 'folder' - dictionary with 'name' key filled with folder name
+    - 'asset' - folder name
+    - 'hierarchy' - parent folder names joined with '/'
+    - 'parent' - direct parent name, project name used if is under
+        project
 
     Required document fields:
-        Asset: 'name', 'data.parents'
+        Folder: 'path' -> Plan to require: 'folderType'
 
     Args:
-        asset_doc (Dict[str, Any]): Queried asset document.
-        project_name (str): Is used for 'parent' key if asset doc does not have
-            any.
+        folder_entity (Dict[str, Any]): Folder entity.
+        project_name (str): Is used for 'parent' key if folder entity
+            does not have any parent.
 
     Returns:
-        Dict[str, str]: Data that are based on asset document and can be used
+        Dict[str, str]: Data that are based on folder entity and can be used
             in templates.
     """
 
-    asset_parents = asset_doc["data"]["parents"]
-    hierarchy = "/".join(asset_parents)
-    if asset_parents:
-        parent_name = asset_parents[-1]
+    path = folder_entity["path"]
+    hierarchy_parts = path.split("/")
+    # Remove empty string from the beginning
+    hierarchy_parts.pop(0)
+    # Remove last part which is folder name
+    folder_name = hierarchy_parts.pop(-1)
+    hierarchy = "/".join(hierarchy_parts)
+    if hierarchy_parts:
+        parent_name = hierarchy_parts[-1]
     else:
         parent_name = project_name
 
     return {
-        "asset": asset_doc["name"],
         "folder": {
-            "name": asset_doc["name"]
+            "name": folder_name,
        },
+        "asset": folder_name,
         "hierarchy": hierarchy,
         "parent": parent_name
    }
 
 
-def get_task_type(asset_doc, task_name):
-    """Get task type based on asset document and task name.
+def get_task_template_data(project_entity, task_entity):
+    """Prepare task template data.
 
     Required document fields:
-        Asset: 'data.tasks'
+        Project: 'taskTypes'
+        Task: 'name', 'taskType'
 
     Args:
-        asset_doc (Dict[str, Any]): Queried asset document.
-        task_name (str): Task name which is under asset.
-
-    Returns:
-        str: Task type name.
-        None: Task was not found on asset document.
-    """
-
-    asset_tasks_info = asset_doc["data"]["tasks"]
-    return asset_tasks_info.get(task_name, {}).get("type")
-
-
-def get_task_template_data(project_doc, asset_doc, task_name):
-    """"Extract task specific data from project and asset documents.
-
-    Required document fields:
-        Project: 'config.tasks'
-        Asset: 'data.tasks'.
-
-    Args:
-        project_doc (Dict[str, Any]): Queried project document.
-        asset_doc (Dict[str, Any]): Queried asset document.
-        task_name (str): Name of task for which data should be returned.
+        project_entity (Dict[str, Any]): Project entity.
+        task_entity (Dict[str, Any]): Task entity.
 
     Returns:
         Dict[str, Dict[str, str]]: Template data
 
-    """
-
-    project_task_types = project_doc["config"]["tasks"]
-    task_type = get_task_type(asset_doc, task_name)
-    task_code = project_task_types.get(task_type, {}).get("short_name")
+    """
+    project_task_types = project_entity["taskTypes"]
+    task_types_by_name = {task["name"]: task for task in project_task_types}
+    task_type = task_entity["taskType"]
+    task_code = task_types_by_name.get(task_type, {}).get("shortName")
     return {
         "task": {
-            "name": task_name,
+            "name": task_entity["name"],
             "type": task_type,
             "short": task_code,
         }
@@ -150,11 +138,11 @@
 
 def get_template_data(
-    project_doc,
-    asset_doc=None,
-    task_name=None,
+    project_entity,
+    folder_entity=None,
+    task_entity=None,
     host_name=None,
-    settings=None
+    settings=None,
 ):
     """Prepare data for templates filling from entered documents and info.
 
@@ -166,14 +154,15 @@
     and their values won't be added to template data if are not passed.
 
     Required document fields:
-        Project: 'name', 'data.code', 'config.tasks'
-        Asset: 'name', 'data.parents', 'data.tasks'
+        Project: 'name', 'code', 'taskTypes.name'
+        Folder: 'name', 'path'
+        Task: 'name', 'taskType'
 
     Args:
-        project_doc (Dict[str, Any]): Mongo document of project from MongoDB.
-        asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB.
-        task_name (Union[str, None]): Task name under passed asset.
-        host_name (Union[str, None]): Used to fill '{app}' key.
+        project_entity (Dict[str, Any]): Project entity.
+        folder_entity (Optional[Dict[str, Any]]): Folder entity.
+        task_entity (Optional[Dict[str, Any]]): Task entity.
+        host_name (Optional[str]): Used to fill '{app}' key.
         settings (Union[Dict, None]): Prepared studio or project settings.
             They're queried if not passed (may be slower).
 
@@ -182,14 +171,14 @@
     """
 
     template_data = get_general_template_data(settings)
-    template_data.update(get_project_template_data(project_doc))
-    if asset_doc:
-        template_data.update(get_asset_template_data(
-            asset_doc, project_doc["name"]
+    template_data.update(get_project_template_data(project_entity))
+    if folder_entity:
+        template_data.update(get_folder_template_data(
+            folder_entity, project_entity["name"]
         ))
-    if task_name:
+    if task_entity:
         template_data.update(get_task_template_data(
-            project_doc, asset_doc, task_name
+            project_entity, task_entity
        ))
 
     if host_name:
@@ -200,7 +189,7 @@
 
 def get_template_data_with_names(
     project_name,
-    asset_name=None,
+    folder_path=None,
     task_name=None,
     host_name=None,
     settings=None
@@ -211,30 +200,31 @@
 
     Only difference is that documents are queried.
 
     Args:
-        project_name (str): Project name for which template data are
-            calculated.
-        asset_name (Union[str, None]): Asset name for which template data are
-            calculated.
-        task_name (Union[str, None]): Task name under passed asset.
-        host_name (Union[str, None]):Used to fill '{app}' key.
+        project_name (str): Project name.
+        folder_path (Optional[str]): Folder path.
+        task_name (Optional[str]): Task name.
+        host_name (Optional[str]): Used to fill '{app}' key,
            because workdir template may contain `{app}` key.
-        settings (Union[Dict, None]): Prepared studio or project settings.
+        settings (Optional[Dict]): Prepared studio or project settings.
            They're queried if not passed.
 
     Returns:
        Dict[str, Any]: Data prepared for filling workdir template.
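+
+    Example:
+        Illustrative call with hypothetical context names; the entities
+        are queried and passed on to 'get_template_data':
+
+            get_template_data_with_names(
+                "demo_project", "/assets/hero", "modeling", "maya"
+            )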
""" - project_doc = get_project( - project_name, fields=["name", "data.code", "config.tasks"] - ) - asset_doc = None - if asset_name: - asset_doc = get_asset_by_name( + project_entity = ayon_api.get_project(project_name) + folder_entity = None + task_entity = None + if folder_path: + folder_entity = ayon_api.get_folder_by_path( project_name, - asset_name, - fields=["name", "data.parents", "data.tasks"] + folder_path, + fields={"id", "path", "folderType"} ) + if task_name and folder_entity: + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) return get_template_data( - project_doc, asset_doc, task_name, host_name, settings + project_entity, folder_entity, task_entity, host_name, settings ) diff --git a/client/ayon_core/pipeline/usdlib.py b/client/ayon_core/pipeline/usdlib.py index 2a5a317d72..1c7943441e 100644 --- a/client/ayon_core/pipeline/usdlib.py +++ b/client/ayon_core/pipeline/usdlib.py @@ -2,14 +2,15 @@ import os import re import logging +import ayon_api try: from pxr import Usd, UsdGeom, Sdf, Kind except ImportError: # Allow to fall back on Multiverse 6.3.0+ pxr usd library from mvpxr import Usd, UsdGeom, Sdf, Kind -from ayon_core.client import get_project, get_asset_by_name from ayon_core.pipeline import Anatomy, get_current_project_name +from ayon_core.pipeline.template_data import get_template_data log = logging.getLogger(__name__) @@ -118,7 +119,7 @@ def create_shot(filepath, layers, create_layers=False): return filepath -def create_model(filename, asset, variant_subsets): +def create_model(filename, folder_path, variant_product_names): """Create a USD Model file. For each of the variation paths it will payload the path and set its @@ -127,22 +128,24 @@ def create_model(filename, asset, variant_subsets): """ project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + assert folder_entity, "Folder not found: %s" % folder_path variants = [] - for subset in variant_subsets: + for product_name in variant_product_names: prefix = "usdModel" - if subset.startswith(prefix): + if product_name.startswith(prefix): # Strip off `usdModel_` - variant = subset[len(prefix):] + variant = product_name[len(prefix):] else: raise ValueError( - "Model subsets must start " "with usdModel: %s" % subset + "Model products must start with usdModel: %s" % product_name ) path = get_usd_master_path( - asset=asset_doc, subset=subset, representation="usd" + folder_entity=folder_entity, + product_name=product_name, + representation="usd" ) variants.append((variant, path)) @@ -169,33 +172,37 @@ def create_model(filename, asset, variant_subsets): stage.GetRootLayer().Save() -def create_shade(filename, asset, variant_subsets): +def create_shade(filename, folder_path, variant_product_names): """Create a master USD shade file for an asset. For each available model variation this should generate a reference - to a `usdShade_{modelVariant}` subset. + to a `usdShade_{modelVariant}` product. 
""" project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + assert folder_entity, "Folder not found: %s" % folder_path variants = [] - for subset in variant_subsets: + for product_name in variant_product_names: prefix = "usdModel" - if subset.startswith(prefix): + if product_name.startswith(prefix): # Strip off `usdModel_` - variant = subset[len(prefix):] + variant = product_name[len(prefix):] else: raise ValueError( - "Model subsets must start " "with usdModel: %s" % subset + "Model products must start " "with usdModel: %s" % product_name ) - shade_subset = re.sub("^usdModel", "usdShade", subset) + shade_product_name = re.sub( + "^usdModel", "usdShade", product_name + ) path = get_usd_master_path( - asset=asset_doc, subset=shade_subset, representation="usd" + folder_entity=folder_entity, + product_name=shade_product_name, + representation="usd" ) variants.append((variant, path)) @@ -206,7 +213,7 @@ def create_shade(filename, asset, variant_subsets): stage.GetRootLayer().Save() -def create_shade_variation(filename, asset, model_variant, shade_variants): +def create_shade_variation(filename, folder_path, model_variant, shade_variants): """Create the master Shade file for a specific model variant. This should reference all shade variants for the specific model variant. @@ -214,16 +221,18 @@ def create_shade_variation(filename, asset, model_variant, shade_variants): """ project_name = get_current_project_name() - asset_doc = get_asset_by_name(project_name, asset) - assert asset_doc, "Asset not found: %s" % asset + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + assert folder_entity, "Folder not found: %s" % folder_path variants = [] for variant in shade_variants: - subset = "usdShade_{model}_{shade}".format( + product_name = "usdShade_{model}_{shade}".format( model=model_variant, shade=variant ) path = get_usd_master_path( - asset=asset_doc, subset=subset, representation="usd" + folder_entity=folder_entity, + product_name=product_name, + representation="usd" ) variants.append((variant, path)) @@ -306,55 +315,48 @@ def _create_variants_file( return stage -def get_usd_master_path(asset, subset, representation): - """Get the filepath for a .usd file of a subset. +def get_usd_master_path(folder_entity, product_name, representation): + """Get the filepath for a .usd file of a product. This will return the path to an unversioned master file generated by `usd_master_file.py`. + Args: + folder_entity (Union[str, dict]): Folder entity. + product_name (str): Product name. + representation (str): Representation name. 
""" project_name = get_current_project_name() - anatomy = Anatomy(project_name) - project_doc = get_project( - project_name, - fields=["name", "data.code"] - ) + project_entity = ayon_api.get_project(project_name) + anatomy = Anatomy(project_name, project_entity=project_entity) - if isinstance(asset, dict) and "name" in asset: - # Allow explicitly passing asset document - asset_doc = asset - else: - asset_doc = get_asset_by_name(project_name, asset, fields=["name"]) + template_data = get_template_data(project_entity, folder_entity) + template_data.update({ + "product": { + "name": product_name + }, + "subset": product_name, + "representation": representation, + "version": 0, # stub version zero + }) - template_obj = anatomy.templates_obj["publish"]["path"] - path = template_obj.format_strict( - { - "project": { - "name": project_name, - "code": project_doc.get("data", {}).get("code") - }, - "folder": { - "name": asset_doc["name"], - }, - "asset": asset_doc["name"], - "subset": subset, - "representation": representation, - "version": 0, # stub version zero - } + template_obj = anatomy.get_template_item( + "publish", "default", "path" ) + path = template_obj.format_strict(template_data) # Remove the version folder - subset_folder = os.path.dirname(os.path.dirname(path)) - master_folder = os.path.join(subset_folder, "master") - fname = "{0}.{1}".format(subset, representation) + product_folder = os.path.dirname(os.path.dirname(path)) + master_folder = os.path.join(product_folder, "master") + fname = "{0}.{1}".format(product_name, representation) return os.path.join(master_folder, fname).replace("\\", "/") def parse_avalon_uri(uri): - # URI Pattern: avalon://{asset}/{subset}.{ext} - pattern = r"avalon://(?P[^/.]*)/(?P[^/]*)\.(?P.*)" + # URI Pattern: avalon://{folder}/{product}.{ext} + pattern = r"avalon://(?P[^/.]*)/(?P[^/]*)\.(?P.*)" if uri.startswith("avalon://"): match = re.match(pattern, uri) if match: diff --git a/client/ayon_core/pipeline/workfile/__init__.py b/client/ayon_core/pipeline/workfile/__init__.py index 94ecc81bd6..36766e3a04 100644 --- a/client/ayon_core/pipeline/workfile/__init__.py +++ b/client/ayon_core/pipeline/workfile/__init__.py @@ -13,6 +13,11 @@ from .path_resolving import ( create_workdir_extra_folders, ) +from .utils import ( + should_use_last_workfile_on_launch, + should_open_workfiles_tool_on_launch, +) + from .build_workfile import BuildWorkfile @@ -30,5 +35,8 @@ __all__ = ( "create_workdir_extra_folders", + "should_use_last_workfile_on_launch", + "should_open_workfiles_tool_on_launch", + "BuildWorkfile", ) diff --git a/client/ayon_core/pipeline/workfile/build_workfile.py b/client/ayon_core/pipeline/workfile/build_workfile.py index 34d8ef0c8f..5ff8b21259 100644 --- a/client/ayon_core/pipeline/workfile/build_workfile.py +++ b/client/ayon_core/pipeline/workfile/build_workfile.py @@ -13,13 +13,8 @@ import re import collections import json -from ayon_core.client import ( - get_asset_by_name, - get_subsets, - get_last_versions, - get_representations, - get_linked_assets, -) +import ayon_api + from ayon_core.settings import get_project_settings from ayon_core.lib import ( filter_profiles, @@ -48,17 +43,11 @@ class BuildWorkfile: return self._log @staticmethod - def map_products_by_type(subset_docs): + def map_products_by_type(product_entities): products_by_type = collections.defaultdict(list) - for subset_doc in subset_docs: - product_type = subset_doc["data"].get("family") - if not product_type: - families = subset_doc["data"].get("families") - if not families: - 
-                    continue
-            product_type = families[0]
-
-            products_by_type[product_type].append(subset_doc)
+        for product_entity in product_entities:
+            product_type = product_entity["productType"]
+            products_by_type[product_type].append(product_entity)
         return products_by_type
 
     def process(self):
@@ -76,21 +65,21 @@
     def build_workfile(self):
         """Prepares and load containers into workfile.
 
-        Loads latest versions of current and linked assets to workfile by logic
-        stored in Workfile profiles from presets. Profiles are set by host,
-        filtered by current task name and used by families.
+        Loads latest versions of current and linked folders to workfile by
+        logic stored in Workfile profiles from presets. Profiles are set
+        by host, filtered by current task name and product types.
 
         Each product type can specify representation names and loaders for
         representations and first available and successful loaded
         representation is returned as container.
 
-        At the end you'll get list of loaded containers per each asset.
+        At the end you'll get a list of loaded containers for each folder.
 
         loaded_containers [{
-            "asset_doc": <asset entity 1>,
+            "folder_entity": <folder entity 1>,
             "containers": [<container 1>, <container 2>, ...]
         }, {
-            "asset_doc": <asset entity 2>,
+            "folder_entity": <folder entity 2>,
             "containers": [<container 3>, ...]
         }, {
             ...
         }]
 
         Returns:
             List[Dict[str, Any]]: Loaded containers during build.
         """
 
-        from ayon_core.pipeline.context_tools import (
-            get_current_project_name,
-            get_current_asset_name,
-            get_current_task_name,
-        )
+        from ayon_core.pipeline.context_tools import get_current_context
 
         loaded_containers = []
 
-        # Get current asset name and entity
-        project_name = get_current_project_name()
-        current_folder_path = get_current_asset_name()
-        current_asset_doc = get_asset_by_name(
+        # Get current folder and task entities
+        context = get_current_context()
+        project_name = context["project_name"]
+        current_folder_path = context["folder_path"]
+        current_task_name = context["task_name"]
+
+        current_folder_entity = ayon_api.get_folder_by_path(
             project_name, current_folder_path
         )
-        # Skip if asset was not found
-        if not current_asset_doc:
+        # Skip if folder was not found
+        if not current_folder_entity:
             print("Folder entity `{}` was not found".format(
                 current_folder_path
             ))
@@ -138,12 +126,9 @@
             self.log.warning("There are no registered loaders.")
             return loaded_containers
 
-        # Get current task name
-        current_task_name = get_current_task_name()
-
         # Load workfile presets for task
         self.build_presets = self.get_build_presets(
-            current_task_name, current_asset_doc
+            current_task_name, current_folder_entity["id"]
         )
 
         # Skip if there are any presets for task
@@ -180,45 +165,53 @@
             "loading preset for it's linked folders."
        ).format(current_task_name))
 
-        # Prepare assets to process by workfile presets
-        asset_docs = []
+        # Prepare folders to process by workfile presets
+        folder_entities = []
         current_folder_id = None
         if current_context_profiles:
-            # Add current asset entity if preset has current context set
-            asset_docs.append(current_asset_doc)
-            current_folder_id = current_asset_doc["_id"]
+            # Add current folder entity if preset has current context set
+            folder_entities.append(current_folder_entity)
+            current_folder_id = current_folder_entity["id"]
 
         if link_context_profiles:
-            # Find and append linked assets if preset has set linked mapping
-            link_assets = get_linked_assets(project_name, current_asset_doc)
-            if link_assets:
-                asset_docs.extend(link_assets)
+            # Find and append linked folders if preset has set linked mapping
+            linked_folder_entities = self._get_linked_folder_entities(
+                project_name, current_folder_entity["id"]
+            )
+            if linked_folder_entities:
+                folder_entities.extend(linked_folder_entities)
 
-        # Skip if there are no assets. This can happen if only linked mapping
-        # is set and there are no links for his asset.
-        if not asset_docs:
+        # Skip if there are no folders. This can happen if only linked mapping
+        # is set and there are no links for this folder.
+        if not folder_entities:
             self.log.warning(
-                "Asset does not have linked assets. Nothing to process."
+                "Folder does not have linked folders. Nothing to process."
             )
             return loaded_containers
 
-        # Prepare entities from database for assets
-        prepared_entities = self._collect_last_version_repres(asset_docs)
+        # Prepare entities from database for folders
+        prepared_entities = self._collect_last_version_repres(
+            folder_entities
+        )
 
         # Load containers by prepared entities and presets
-        # - Current asset containers
+        # - Current folder containers
         if current_folder_id and current_folder_id in prepared_entities:
             current_context_data = prepared_entities.pop(current_folder_id)
-            loaded_data = self.load_containers_by_asset_data(
-                current_context_data, current_context_profiles, loaders_by_name
+            loaded_data = self.load_containers_by_folder_data(
+                current_context_data,
+                current_context_profiles,
+                loaders_by_name
             )
             if loaded_data:
                 loaded_containers.append(loaded_data)
 
         # - Linked assets container
-        for linked_asset_data in prepared_entities.values():
-            loaded_data = self.load_containers_by_asset_data(
-                linked_asset_data, link_context_profiles, loaders_by_name
+        for linked_folder_data in prepared_entities.values():
+            loaded_data = self.load_containers_by_folder_data(
+                linked_folder_data,
+                link_context_profiles,
+                loaders_by_name
            )
            if loaded_data:
                loaded_containers.append(loaded_data)
@@ -226,7 +219,7 @@
         # Return list of loaded containers
         return loaded_containers
 
-    def get_build_presets(self, task_name, asset_doc):
+    def get_build_presets(self, task_name, folder_id):
         """ Returns presets to build workfile for task name.
 
         Presets are loaded for current project received by
@@ -235,6 +228,7 @@
         Args:
             task_name (str): Task name used for filtering build presets.
+            folder_id (str): Folder id.
Returns: Dict[str, Any]: preset per entered task name @@ -245,10 +239,9 @@ class BuildWorkfile: get_current_project_name, ) + project_name = get_current_project_name() host_name = get_current_host_name() - project_settings = get_project_settings( - get_current_project_name() - ) + project_settings = get_project_settings(project_name) host_settings = project_settings.get(host_name) or {} # Get presets for host @@ -261,13 +254,15 @@ class BuildWorkfile: if not builder_profiles: return None - task_type = ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(task_name, {}) - .get("type") + task_entity = ayon_api.get_task_by_name( + project_name, + folder_id, + task_name, ) + task_type = None + if task_entity: + task_type = task_entity["taskType"] + filter_data = { "task_types": task_type, "tasks": task_name @@ -320,9 +315,9 @@ class BuildWorkfile: ).format(json.dumps(profile, indent=4))) continue - # Check families - profile_families = profile.get("product_types") - if not profile_families: + # Check product types + profile_product_types = profile.get("product_types") + if not profile_product_types: self.log.warning(( "Build profile is missing families configuration: {0}" ).format(json.dumps(profile, indent=4))) @@ -339,7 +334,8 @@ class BuildWorkfile: # Prepare lowered families and representation names profile["product_types_lowered"] = [ - fam.lower() for fam in profile_families + product_type.lower() + for product_type in profile_product_types ] profile["repre_names_lowered"] = [ name.lower() for name in profile_repre_names @@ -349,7 +345,32 @@ class BuildWorkfile: return valid_profiles - def _prepare_profile_for_products(self, subset_docs, profiles): + def _get_linked_folder_entities(self, project_name, folder_id): + """Get linked folder entities for entered folder. + + Args: + project_name (str): Project name. + folder_id (str): Folder id. + + Returns: + list[dict[str, Any]]: Linked folder entities. + + """ + links = ayon_api.get_folder_links( + project_name, folder_id, link_direction="in" + ) + linked_folder_ids = { + link["entityId"] + for link in links + if link["entityType"] == "folder" + } + if not linked_folder_ids: + return [] + return list(ayon_api.get_folders( + project_name, folder_ids=linked_folder_ids + )) + + def _prepare_profile_for_products(self, product_entities, profiles): """Select profile for each product by it's data. Profiles are filtered for each product individually. @@ -360,7 +381,7 @@ class BuildWorkfile: matching profile. Args: - subset_docs (List[Dict[str, Any]]): Subset documents. + product_entities (List[Dict[str, Any]]): product entities. profiles (List[Dict[str, Any]]): Build profiles. 
Returns: @@ -368,10 +389,10 @@ class BuildWorkfile: """ # Prepare products - products_by_type = self.map_products_by_type(subset_docs) + products_by_type = self.map_products_by_type(product_entities) profiles_by_product_id = {} - for product_type, subset_docs in products_by_type.items(): + for product_type, product_entities in products_by_type.items(): product_type_low = product_type.lower() for profile in profiles: # Skip profile if does not contain product type @@ -387,46 +408,46 @@ class BuildWorkfile: profile_regexes = _profile_regexes # TODO prepare regex compilation - for subset_doc in subset_docs: + for product_entity in product_entities: # Verify regex filtering (optional) if profile_regexes: valid = False for pattern in profile_regexes: - if re.match(pattern, subset_doc["name"]): + if re.match(pattern, product_entity["name"]): valid = True break if not valid: continue - profiles_by_product_id[subset_doc["_id"]] = profile + profiles_by_product_id[product_entity["id"]] = profile # break profiles loop on finding the first matching profile break return profiles_by_product_id - def load_containers_by_asset_data( - self, asset_doc_data, build_profiles, loaders_by_name + def load_containers_by_folder_data( + self, linked_folder_data, build_profiles, loaders_by_name ): - """Load containers for entered asset entity by Build profiles. + """Load containers for entered folder entity by Build profiles. Args: - asset_doc_data (Dict[str, Any]): Prepared data with products, - last versions and representations for specific asset. + linked_folder_data (Dict[str, Any]): Prepared data with products, + last versions and representations for specific folder. build_profiles (Dict[str, Any]): Build profiles. loaders_by_name (Dict[str, LoaderPlugin]): Available loaders per name. Returns: - Dict[str, Any]: Output contains asset document + Dict[str, Any]: Output contains folder entity and loaded containers. 
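+
+        Example:
+            Illustrative output shape (entity contents shortened):
+
+                {
+                    "folder_entity": {"id": "...", "path": "/assets/hero"},
+                    "containers": [container_1, container_2]
+                }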
""" # Make sure all data are not empty - if not asset_doc_data or not build_profiles or not loaders_by_name: + if not linked_folder_data or not build_profiles or not loaders_by_name: return - asset_doc = asset_doc_data["asset_doc"] + folder_entity = linked_folder_data["folder_entity"] valid_profiles = self._filter_build_profiles( build_profiles, loaders_by_name @@ -442,20 +463,20 @@ class BuildWorkfile: products_by_id = {} version_by_product_id = {} repres_by_version_id = {} - for product_id, in_data in asset_doc_data["subsets"].items(): - subset_doc = in_data["subset_doc"] - products_by_id[subset_doc["_id"]] = subset_doc + for product_id, in_data in linked_folder_data["products"].items(): + product_entity = in_data["product_entity"] + products_by_id[product_entity["id"]] = product_entity version_data = in_data["version"] - version_doc = version_data["version_doc"] - version_by_product_id[product_id] = version_doc - repres_by_version_id[version_doc["_id"]] = ( + version_entity = version_data["version_entity"] + version_by_product_id[product_id] = version_entity + repres_by_version_id[version_entity["id"]] = ( version_data["repres"] ) if not products_by_id: - self.log.warning("There are not products for folder {0}".format( - asset_doc["name"] + self.log.warning("There are not products for folder {}".format( + folder_entity["path"] )) return @@ -470,8 +491,8 @@ class BuildWorkfile: for product_id, profile in profiles_by_product_id.items(): profile_repre_names = profile["repre_names_lowered"] - version_doc = version_by_product_id[product_id] - version_id = version_doc["_id"] + version_entity = version_by_product_id[product_id] + version_id = version_entity["id"] repres = repres_by_version_id[version_id] for repre in repres: repre_name_low = repre["name"].lower() @@ -480,12 +501,12 @@ class BuildWorkfile: # DEBUG message msg = "Valid representations for Folder: `{}`".format( - asset_doc["name"] + folder_entity["path"] ) for product_id, repres in valid_repres_by_product_id.items(): - subset_doc = products_by_id[product_id] + product_entity = products_by_id[product_id] msg += "\n# Product Name/ID: `{}`/{}".format( - subset_doc["name"], product_id + product_entity["name"], product_id ) for repre in repres: msg += "\n## Repre name: `{}`".format(repre["name"]) @@ -498,7 +519,7 @@ class BuildWorkfile: ) return { - "asset_doc": asset_doc, + "folder_entity": folder_entity, "containers": containers } @@ -514,13 +535,13 @@ class BuildWorkfile: If product has representation matching representation name each loader is tried to load it until any is successful. If none of them was successful then next representation name is tried. - Subset process loop ends when any representation is loaded or + Product process loop ends when any representation is loaded or all matching representations were already tried. Args: repres_by_product_id (Dict[str, Dict[str, Any]]): Available representations mapped by their parent (product) id. - products_by_id (Dict[str, Dict[str, Any]]): Subset documents + products_by_id (Dict[str, Dict[str, Any]]): Product entities mapped by their id. profiles_by_product_id (Dict[str, Dict[str, Any]]): Build profiles mapped by product id. 
@@ -539,9 +560,9 @@
         product_ids_ordered = []
         for preset in build_presets:
             for product_type in preset["product_types"]:
-                for product_id, subset_doc in products_by_id.items():
+                for product_id, product_entity in products_by_id.items():
                     # TODO 'families' is not available on product
-                    families = subset_doc["data"].get("families") or []
+                    families = product_entity["data"].get("families") or []
                     if product_type not in families:
                         continue
 
@@ -596,7 +617,7 @@
                 try:
                     container = load_container(
                         loader,
-                        repre["_id"],
+                        repre["id"],
                         name=product_name
                     )
                     loaded_containers.append(container)
@@ -628,11 +649,11 @@
 
         return loaded_containers
 
-    def _collect_last_version_repres(self, asset_docs):
-        """Collect products, versions and representations for asset_entities.
+    def _collect_last_version_repres(self, folder_entities):
+        """Collect products, versions and representations for folder_entities.
 
         Args:
-            asset_docs (List[Dict[str, Any]]): Asset entities for which
+            folder_entities (List[Dict[str, Any]]): Folder entities for which
                 want to find data.
 
         Returns:
 
         Example output:
         ```
         {
-            {Asset ID}: {
-                "asset_doc": <asset entity>,
-                "subsets": {
-                    {Subset ID}: {
-                        "subset_doc": <subset entity>,
+            <folder id>: {
+                "folder_entity": <folder entity>,
+                "products": {
+                    <product id>: {
+                        "product_entity": <product entity>,
                         "version": {
-                            "version_doc": <version entity>,
+                            "version_entity": <version entity>,
                             "repres": [
                                 <repre entity 1>, <repre entity 2>, ...
                             ]
                         }
                     },
                     ...
                 }
             },
             ...
         }
-        output[folder_id]["subsets"][product_id]["version"]["repres"]
+        output[folder_id]["products"][product_id]["version"]["repres"]
         ```
         """
 
         from ayon_core.pipeline.context_tools import get_current_project_name
 
         output = {}
-        if not asset_docs:
+        if not folder_entities:
             return output
 
-        asset_docs_by_ids = {
-            asset_doc["_id"]: asset_doc
-            for asset_doc in asset_docs
+        folder_entities_by_id = {
+            folder_entity["id"]: folder_entity
+            for folder_entity in folder_entities
         }
 
         project_name = get_current_project_name()
-        subset_docs = list(get_subsets(
-            project_name, asset_ids=asset_docs_by_ids.keys()
+        product_entities = list(ayon_api.get_products(
+            project_name, folder_ids=folder_entities_by_id.keys()
         ))
-        subset_docs_by_id = {
-            subset_doc["_id"]: subset_doc
-            for subset_doc in subset_docs
+        product_entities_by_id = {
+            product_entity["id"]: product_entity
+            for product_entity in product_entities
         }
-        last_version_by_product_id = get_last_versions(
-            project_name, subset_docs_by_id.keys()
+        last_version_by_product_id = ayon_api.get_last_versions(
+            project_name, product_entities_by_id.keys()
         )
-        last_version_docs_by_id = {
-            version["_id"]: version
-            for version in last_version_by_product_id.values()
+        last_version_entities_by_id = {
+            version_entity["id"]: version_entity
+            for version_entity in last_version_by_product_id.values()
         }
-        repre_docs = get_representations(
-            project_name, version_ids=last_version_docs_by_id.keys()
+        repre_entities = ayon_api.get_representations(
+            project_name, version_ids=last_version_entities_by_id.keys()
         )
-        for repre_doc in repre_docs:
-            version_id = repre_doc["parent"]
-            version_doc = last_version_docs_by_id[version_id]
+        for repre_entity in repre_entities:
+            version_id = repre_entity["versionId"]
+            version_entity = last_version_entities_by_id[version_id]
 
-            product_id = version_doc["parent"]
-            subset_doc = subset_docs_by_id[product_id]
+            product_id = version_entity["productId"]
+            product_entity = product_entities_by_id[product_id]
 
-            folder_id = subset_doc["parent"]
-            asset_doc = asset_docs_by_ids[folder_id]
+            folder_id = product_entity["folderId"]
+            folder_entity = folder_entities_by_id[folder_id]
 
             if folder_id not in output:
                 output[folder_id] = {
-                    "asset_doc": asset_doc,
-                    "subsets": {}
+                    "folder_entity": folder_entity,
+                    "products": {}
                 }
 
-            if product_id not in output[folder_id]["subsets"]:
-                output[folder_id]["subsets"][product_id] = {
-                    "subset_doc": subset_doc,
+            if product_id not in output[folder_id]["products"]:
+                output[folder_id]["products"][product_id] = {
+                    "product_entity": product_entity,
                     "version": {
-                        "version_doc": version_doc,
+                        "version_entity": version_entity,
                         "repres": []
                     }
                 }
 
-            output[folder_id]["subsets"][product_id]["version"]["repres"].append(
-                repre_doc
-            )
+            product_info = output[folder_id]["products"][product_id]
+            product_info["version"]["repres"].append(repre_entity)
 
         return output
diff --git a/client/ayon_core/pipeline/workfile/path_resolving.py b/client/ayon_core/pipeline/workfile/path_resolving.py
index 7718e32317..47d6f4ddfa 100644
--- a/client/ayon_core/pipeline/workfile/path_resolving.py
+++ b/client/ayon_core/pipeline/workfile/path_resolving.py
@@ -3,7 +3,8 @@ import re
 import copy
 import platform
 
-from ayon_core.client import get_project, get_asset_by_name
+import ayon_api
+
 from ayon_core.settings import get_project_settings
 from ayon_core.lib import (
     filter_profiles,
@@ -15,7 +16,11 @@ from ayon_core.pipeline.template_data import get_template_data
 
 
 def get_workfile_template_key_from_context(
-    asset_name, task_name, host_name, project_name, project_settings=None
+    project_name,
+    folder_path,
+    task_name,
+    host_name,
+    project_settings=None
 ):
     """Helper function to get template key for workfile template.
 
@@ -23,41 +28,39 @@
     context".
 
     Args:
-        asset_name(str): Name of asset document.
-        task_name(str): Task name for which is template key retrieved.
-            Must be available on asset document under `data.tasks`.
-        host_name(str): Name of host implementation for which is workfile
-            used.
-        project_name(str): Project name where asset and task is.
-        project_settings(Dict[str, Any]): Project settings for passed
+        project_name (str): Project name.
+        folder_path (str): Folder path.
+        task_name (str): Task name.
+        host_name (str): Host name.
+        project_settings (Dict[str, Any]): Project settings for passed
             'project_name'. Not required at all but makes function faster.
     """
 
-    asset_doc = get_asset_by_name(
-        project_name, asset_name, fields=["data.tasks"]
+    folder_entity = ayon_api.get_folder_by_path(
+        project_name, folder_path, fields={"id"}
     )
-    asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
-    task_info = asset_tasks.get(task_name) or {}
-    task_type = task_info.get("type")
+    task_entity = ayon_api.get_task_by_name(
+        project_name, folder_entity["id"], task_name
+    )
+    task_type = task_entity.get("taskType")
     return get_workfile_template_key(
-        task_type, host_name, project_name, project_settings
+        project_name, task_type, host_name, project_settings
    )
 
 
 def get_workfile_template_key(
-    task_type, host_name, project_name, project_settings=None
+    project_name, task_type, host_name, project_settings=None
 ):
     """Workfile template key which should be used to get workfile template.
 
     Function is using profiles from project settings to return right template
-    for passet task type and host name.
+    for passed task type and host name.
 
     Args:
-        task_type(str): Name of task type.
-        host_name(str): Name of host implementation (e.g. "maya", "nuke", ...)
-        project_name(str): Name of project in which context should look for
-            settings.
+ project_name(str): Project name. + task_type(str): Task type. + host_name(str): Host name (e.g. "maya", "nuke", ...) project_settings(Dict[str, Any]): Prepared project settings for project name. Optional to make processing faster. """ @@ -126,13 +129,15 @@ def get_workdir_with_workdir_data( if not template_key: template_key = get_workfile_template_key( + workdir_data["project"]["name"], workdir_data["task"]["type"], workdir_data["app"], - workdir_data["project"]["name"], project_settings ) - template_obj = anatomy.templates_obj[template_key]["folder"] + template_obj = anatomy.get_template_item( + "work", template_key, "directory" + ) # Output is TemplateResult object which contain useful data output = template_obj.format_strict(workdir_data) if output: @@ -141,9 +146,9 @@ def get_workdir_with_workdir_data( def get_workdir( - project_doc, - asset_doc, - task_name, + project_entity, + folder_entity, + task_entity, host_name, anatomy=None, template_key=None, @@ -152,14 +157,14 @@ def get_workdir( """Fill workdir path from entered data and project's anatomy. Args: - project_doc (Dict[str, Any]): Mongo document of project from MongoDB. - asset_doc (Dict[str, Any]): Mongo document of asset from MongoDB. - task_name (str): Task name for which are workdir data preapred. + project_entity (Dict[str, Any]): Project entity. + folder_entity (Dict[str, Any]): Folder entity. + task_entity (dict[str, Any]): Task entity. host_name (str): Host which is used to workdir. This is required because workdir template may contain `{app}` key. In `Session` is stored under `AYON_HOST_NAME` key. anatomy (Anatomy): Optional argument. Anatomy object is created using - project name from `project_doc`. It is preferred to pass this + project name from `project_entity`. It is preferred to pass this argument as initialization of a new Anatomy object may be time consuming. template_key (str): Key of work templates in anatomy templates. Default @@ -173,10 +178,15 @@ def get_workdir( """ if not anatomy: - anatomy = Anatomy(project_doc["name"]) + anatomy = Anatomy( + project_entity["name"], project_entity=project_entity + ) workdir_data = get_template_data( - project_doc, asset_doc, task_name, host_name + project_entity, + folder_entity, + task_entity, + host_name, ) # Output is TemplateResult object which contain useful data return get_workdir_with_workdir_data( @@ -301,11 +311,12 @@ def get_last_workfile( Returns file with version 1 if there is not workfile yet. Args: - workdir(str): Path to dir where workfiles are stored. - file_template(str): Template of file name. - fill_data(Dict[str, Any]): Data for filling template. - extensions(Iterable[str]): All allowed file extensions of workfile. - full_path(bool): Full path to file is returned if set to True. + workdir (str): Path to dir where workfiles are stored. + file_template (str): Template of file name. + fill_data (Dict[str, Any]): Data for filling template. + extensions (Iterable[str]): All allowed file extensions of workfile. + full_path (Optional[bool]): Full path to file is returned if + set to True. Returns: str: Last or first workfile as filename of full path to filename. 
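+
+    Example:
+        Hypothetical template and fill data for illustration; real values
+        come from project anatomy and prepared template data:
+
+            get_last_workfile(
+                "/work/hero/modeling",
+                "{task[name]}_v{version:0>3}.{ext}",
+                {"task": {"name": "modeling"}},
+                [".ma", ".mb"],
+                full_path=True,
+            )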
@@ -326,7 +337,7 @@
         data.pop("comment", None)
         if not data.get("ext"):
             data["ext"] = extensions[0]
-        data["ext"] = data["ext"].replace('.', '')
+        data["ext"] = data["ext"].lstrip(".")
         filename = StringTemplate.format_strict_template(file_template, data)
 
     if full_path:
@@ -336,9 +347,9 @@
 
 
 def get_custom_workfile_template(
-    project_doc,
-    asset_doc,
-    task_name,
+    project_entity,
+    folder_entity,
+    task_entity,
     host_name,
     anatomy=None,
     project_settings=None
@@ -356,13 +367,13 @@
     points to a file which is copied as first workfile
 
-    It is expected that passed argument are already queried documents of
-    project and asset as parents of processing task name.
+    It is expected that passed arguments are already queried entities of
+    project and folder as parents of processing task name.
 
     Args:
-        project_doc (Dict[str, Any]): Project document from MongoDB.
-        asset_doc (Dict[str, Any]): Asset document from MongoDB.
-        task_name (str): Name of task for which templates are filtered.
+        project_entity (Dict[str, Any]): Project entity.
+        folder_entity (Dict[str, Any]): Folder entity.
+        task_entity (Dict[str, Any]): Task entity.
         host_name (str): Name of host.
         anatomy (Anatomy): Optionally passed anatomy object for passed
             project name.
@@ -376,7 +387,7 @@
 
     log = Logger.get_logger("CustomWorkfileResolve")
 
-    project_name = project_doc["name"]
+    project_name = project_entity["name"]
     if project_settings is None:
         project_settings = get_project_settings(project_name)
@@ -411,9 +422,9 @@
         if anatomy is None:
             anatomy = Anatomy(project_name)
 
-        # get project, asset, task anatomy context data
+        # get project, folder, task anatomy context data
         anatomy_context_data = get_template_data(
-            project_doc, asset_doc, task_name, host_name
+            project_entity, folder_entity, task_entity, host_name
         )
         # add root dict
         anatomy_context_data["root"] = anatomy.roots
@@ -444,7 +455,7 @@
 
 def get_custom_workfile_template_by_string_context(
     project_name,
-    asset_name,
+    folder_path,
     task_name,
     host_name,
     anatomy=None,
@@ -452,30 +463,38 @@
 ):
     """Filter and fill workfile template profiles by passed context.
 
-    Passed context are string representations of project, asset and task.
-    Function will query documents of project and asset to be able use
+    Passed context consists of string representations of project, folder and
+    task. Function will query project and folder entities to be able to use
     `get_custom_workfile_template` for rest of logic.
 
     Args:
-        project_name(str): Project name.
-        asset_name(str): Asset name.
-        task_name(str): Task name.
+        project_name (str): Project name.
+        folder_path (str): Folder path.
+        task_name (str): Task name.
         host_name (str): Name of host.
-        anatomy(Anatomy): Optionally prepared anatomy object for passed
+        anatomy (Anatomy): Optionally prepared anatomy object for passed
             project.
-        project_settings(Dict[str, Any]): Preloaded project settings.
+        project_settings (Dict[str, Any]): Preloaded project settings.
 
     Returns:
-        str: Path to template or None if none of profiles match current
-            context. (Existence of formatted path is not validated.)
-        None: If no profile is matching context.
+        Union[str, None]: Path to template or None if none of profiles match
+            current context. (Existence of formatted path is not validated.)
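+
+    Example:
+        Illustrative call with hypothetical names:
+
+            get_custom_workfile_template_by_string_context(
+                "demo_project", "/assets/hero", "modeling", "maya"
+            )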
+
     """
-    project_doc = get_project(project_name)
-    asset_doc = get_asset_by_name(project_name, asset_name)
+    project_entity = ayon_api.get_project(project_name)
+    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
+    task_entity = ayon_api.get_task_by_name(
+        project_name, folder_entity["id"], task_name
+    )
 
     return get_custom_workfile_template(
-        project_doc, asset_doc, task_name, host_name, anatomy, project_settings
+        project_entity,
+        folder_entity,
+        task_entity,
+        host_name,
+        anatomy,
+        project_settings
     )
diff --git a/client/ayon_core/pipeline/workfile/utils.py b/client/ayon_core/pipeline/workfile/utils.py
new file mode 100644
index 0000000000..53de3269b2
--- /dev/null
+++ b/client/ayon_core/pipeline/workfile/utils.py
@@ -0,0 +1,121 @@
+from ayon_core.lib import filter_profiles
+from ayon_core.settings import get_project_settings
+
+
+def should_use_last_workfile_on_launch(
+    project_name,
+    host_name,
+    task_name,
+    task_type,
+    default_output=False,
+    project_settings=None,
+):
+    """Define if host should start last version workfile if possible.
+
+    Default output is `False`. Can be overridden with environment variable
+    `AYON_OPEN_LAST_WORKFILE`, valid values without case sensitivity are
+    `"0", "1", "true", "false", "yes", "no"`.
+
+    Args:
+        project_name (str): Name of project.
+        host_name (str): Name of host which is launched. In avalon's
+            application context its value is stored in app definition under
+            key `"application_dir"`. Is not case sensitive.
+        task_name (str): Name of task which is used for launching the host.
+            Task name is not case sensitive.
+        task_type (str): Task type.
+        default_output (Optional[bool]): Default output value if no profile
+            is found.
+        project_settings (Optional[dict[str, Any]]): Project settings.
+
+    Returns:
+        bool: True if host should open the last workfile.
+
+    """
+    if project_settings is None:
+        project_settings = get_project_settings(project_name)
+    profiles = (
+        project_settings
+        ["core"]
+        ["tools"]
+        ["Workfiles"]
+        ["last_workfile_on_startup"]
+    )
+
+    if not profiles:
+        return default_output
+
+    filter_data = {
+        "tasks": task_name,
+        "task_types": task_type,
+        "hosts": host_name
+    }
+    matching_item = filter_profiles(profiles, filter_data)
+
+    output = None
+    if matching_item:
+        output = matching_item.get("enabled")
+
+    if output is None:
+        return default_output
+    return output
+
+
+def should_open_workfiles_tool_on_launch(
+    project_name,
+    host_name,
+    task_name,
+    task_type,
+    default_output=False,
+    project_settings=None,
+):
+    """Define if host should start workfile tool at host launch.
+
+    Default output is `False`. Can be overridden with environment variable
+    `AYON_WORKFILE_TOOL_ON_START`, valid values without case sensitivity are
+    `"0", "1", "true", "false", "yes", "no"`.
+
+    Args:
+        project_name (str): Name of project.
+        host_name (str): Name of host which is launched. In avalon's
+            application context its value is stored in app definition under
+            key `"application_dir"`. Is not case sensitive.
+        task_name (str): Name of task which is used for launching the host.
+            Task name is not case sensitive.
+        task_type (str): Task type.
+        default_output (Optional[bool]): Default output value if no profile
+            is found.
+        project_settings (Optional[dict[str, Any]]): Project settings.
+
+    Returns:
+        bool: True if the workfiles tool should open on launch.
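+
+    Example:
+        Hypothetical context values; the result depends on the
+        "open_workfile_tool_on_startup" profiles in project settings:
+
+            should_open_workfiles_tool_on_launch(
+                "demo_project", "maya", "modeling", "Modeling"
+            )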
+ + """ + + if project_settings is None: + project_settings = get_project_settings(project_name) + profiles = ( + project_settings + ["core"] + ["tools"] + ["Workfiles"] + ["open_workfile_tool_on_startup"] + ) + + if not profiles: + return default_output + + filter_data = { + "tasks": task_name, + "task_types": task_type, + "hosts": host_name + } + matching_item = filter_profiles(profiles, filter_data) + + output = None + if matching_item: + output = matching_item.get("enabled") + + if output is None: + return default_output + return output diff --git a/client/ayon_core/pipeline/workfile/workfile_template_builder.py b/client/ayon_core/pipeline/workfile/workfile_template_builder.py index c889e0cafb..8082adc65d 100644 --- a/client/ayon_core/pipeline/workfile/workfile_template_builder.py +++ b/client/ayon_core/pipeline/workfile/workfile_template_builder.py @@ -18,15 +18,16 @@ import copy from abc import ABCMeta, abstractmethod import six -from ayon_api import get_products, get_last_versions -from ayon_api.graphql_queries import folders_graphql_query - -from ayon_core.client import ( - get_asset_by_name, - get_linked_assets, +from ayon_api import ( + get_folders, + get_folder_by_path, + get_folder_links, + get_task_by_name, + get_products, + get_last_versions, get_representations, - get_ayon_server_api_connection, ) + from ayon_core.settings import get_project_settings from ayon_core.host import IWorkfileHost, HostBase from ayon_core.lib import ( @@ -39,7 +40,7 @@ from ayon_core.lib.attribute_definitions import get_attributes_keys from ayon_core.pipeline import Anatomy from ayon_core.pipeline.load import ( get_loaders_by_name, - get_contexts_for_repre_docs, + get_representation_contexts, load_with_repre_context, ) @@ -48,6 +49,8 @@ from ayon_core.pipeline.create import ( CreateContext, ) +_NOT_SET = object() + class TemplateNotFound(Exception): """Exception raised when template does not exist.""" @@ -117,9 +120,9 @@ class AbstractTemplateBuilder(object): self._project_settings = None - self._current_asset_doc = None - self._linked_asset_docs = None - self._task_type = None + self._current_folder_entity = _NOT_SET + self._current_task_entity = _NOT_SET + self._linked_folder_entities = _NOT_SET @property def project_name(self): @@ -128,9 +131,9 @@ class AbstractTemplateBuilder(object): return os.getenv("AYON_PROJECT_NAME") @property - def current_asset_name(self): + def current_folder_path(self): if isinstance(self._host, HostBase): - return self._host.get_current_asset_name() + return self._host.get_current_folder_path() return os.getenv("AYON_FOLDER_PATH") @property @@ -144,7 +147,7 @@ class AbstractTemplateBuilder(object): return self._host.get_current_context() return { "project_name": self.project_name, - "folder_path": self.current_asset_name, + "folder_path": self.current_folder_path, "task_name": self.current_task_name } @@ -155,33 +158,39 @@ class AbstractTemplateBuilder(object): return self._project_settings @property - def current_asset_doc(self): - if self._current_asset_doc is None: - self._current_asset_doc = get_asset_by_name( - self.project_name, self.current_asset_name + def current_folder_entity(self): + if self._current_folder_entity is _NOT_SET: + self._current_folder_entity = get_folder_by_path( + self.project_name, self.current_folder_path ) - return self._current_asset_doc + return self._current_folder_entity @property - def linked_asset_docs(self): - if self._linked_asset_docs is None: - self._linked_asset_docs = get_linked_assets( - self.project_name, 
self.current_asset_doc - ) - return self._linked_asset_docs + def linked_folder_entities(self): + if self._linked_folder_entities is _NOT_SET: + self._linked_folder_entities = self._get_linked_folder_entities() + return self._linked_folder_entities + + @property + def current_task_entity(self): + if self._current_task_entity is _NOT_SET: + task_entity = None + folder_entity = self.current_folder_entity + if folder_entity: + task_entity = get_task_by_name( + self.project_name, + folder_entity["id"], + self.current_task_name + ) + self._current_task_entity = task_entity + return self._current_task_entity @property def current_task_type(self): - asset_doc = self.current_asset_doc - if not asset_doc: - return None - return ( - asset_doc - .get("data", {}) - .get("tasks", {}) - .get(self.current_task_name, {}) - .get("type") - ) + task_entity = self.current_task_entity + if task_entity: + return task_entity["taskType"] + return None @property def create_context(self): @@ -242,9 +251,9 @@ class AbstractTemplateBuilder(object): self._loaders_by_name = None self._creators_by_name = None - self._current_asset_doc = None - self._linked_asset_docs = None - self._task_type = None + self._current_folder_entity = _NOT_SET + self._current_task_entity = _NOT_SET + self._linked_folder_entities = _NOT_SET self._project_settings = None @@ -256,6 +265,22 @@ class AbstractTemplateBuilder(object): self._loaders_by_name = get_loaders_by_name() return self._loaders_by_name + def _get_linked_folder_entities(self): + project_name = self.project_name + folder_entity = self.current_folder_entity + if not folder_entity: + return [] + links = get_folder_links( + project_name, folder_entity["id"], link_direction="in" + ) + linked_folder_ids = { + link["entityId"] + for link in links + if link["entityType"] == "folder" + } + + return list(get_folders(project_name, folder_ids=linked_folder_ids)) + def _collect_legacy_creators(self): creators_by_name = {} for creator in discover_legacy_creator_plugins(): @@ -304,9 +329,9 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. - Key should be self explanatory to content. - - wrong: 'asset' - - good: 'asset_name' + Key should be self-explanatory to content. + - wrong: 'folder' + - good: 'folder_name' Args: key (str): Key under which is key stored. @@ -350,9 +375,9 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. - Key should be self explanatory to content. - - wrong: 'asset' - - good: 'asset_name' + Key should be self-explanatory to content. + - wrong: 'folder' + - good: 'folder_path' Args: key (str): Key under which is key stored. @@ -370,9 +395,9 @@ class AbstractTemplateBuilder(object): is good practice to check if the same value is not already stored under different key or if the key is not already used for something else. - Key should be self explanatory to content. - - wrong: 'asset' - - good: 'asset_name' + Key should be self-explanatory to content. + - wrong: 'folder' + - good: 'folder_path' Args: key (str): Key under which is key stored. 
@@ -441,7 +466,7 @@ class AbstractTemplateBuilder(object): return list(sorted( placeholders, - key=lambda i: i.order + key=lambda placeholder: placeholder.order )) def build_template( @@ -660,7 +685,7 @@ class AbstractTemplateBuilder(object): for placeholder in placeholders } all_processed = len(placeholders) == 0 - # Counter is checked at the ned of a loop so the loop happens at least + # Counter is checked at the end of a loop so the loop happens at least # once. iter_counter = 0 while not all_processed: @@ -1020,9 +1045,9 @@ class PlaceholderPlugin(object): Using shared data from builder but stored under plugin identifier. - Key should be self explanatory to content. - - wrong: 'asset' - - good: 'asset_name' + Key should be self-explanatory to content. + - wrong: 'folder' + - good: 'folder_path' Args: key (str): Key under which is key stored. @@ -1060,9 +1085,9 @@ class PlaceholderPlugin(object): Using shared data from builder but stored under plugin identifier. - Key should be self explanatory to content. - - wrong: 'asset' - - good: 'asset_name' + Key should be self-explanatory to content. + - wrong: 'folder' + - good: 'folder_path' Shared populate data are cleaned up during populate while loop. @@ -1082,10 +1107,10 @@ class PlaceholderItem(object): """Item representing single item in scene that is a placeholder to process. Items are always created and updated by their plugins. Each plugin can use - modified class of 'PlacehoderItem' but only to add more options instead of + modified class of 'PlaceholderItem' but only to add more options instead of new other. - Scene identifier is used to avoid processing of the palceholder item + Scene identifier is used to avoid processing of the placeholder item multiple times so must be unique across whole workfile builder. Args: @@ -1137,7 +1162,7 @@ class PlaceholderItem(object): """Placeholder data which can modify how placeholder is processed. Possible general keys - - order: Can define the order in which is palceholder processed. + - order: Can define the order in which is placeholder processed. Lower == earlier. Other keys are defined by placeholder and should validate them on item @@ -1239,11 +1264,9 @@ class PlaceholderLoadMixin(object): """Unified attribute definitions for load placeholder. Common function for placeholder plugins used for loading of - repsentations. Use it in 'get_placeholder_options'. + representations. Use it in 'get_placeholder_options'. Args: - plugin (PlaceholderPlugin): Plugin used for loading of - representations. options (Dict[str, Any]): Already available options which are used as defaults for attributes. 
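The docstrings in the two hunks above describe plugin-scoped shared data: values are kept in the builder's shared storage, but under the plugin identifier, so keys of different plugins cannot collide. A rough sketch of that layering, assuming a deliberately simplified builder interface (the real `PlaceholderPlugin` API differs in details):

```python
# Illustrative names only; not the actual builder/plugin classes.
class Builder:
    def __init__(self):
        self._shared_data = {}

    def get_shared_data(self, key):
        return self._shared_data.get(key)

    def set_shared_data(self, key, value):
        self._shared_data[key] = value


class Plugin:
    identifier = "placeholder.load"

    def __init__(self, builder):
        self.builder = builder

    def _plugin_data(self):
        # All of this plugin's values live in one dict stored under the
        # plugin identifier, so keys of other plugins cannot be trampled.
        data = self.builder.get_shared_data(self.identifier)
        if data is None:
            data = {}
            self.builder.set_shared_data(self.identifier, data)
        return data

    def set_plugin_data(self, key, value):
        self._plugin_data()[key] = value
```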
@@ -1261,14 +1284,14 @@ class PlaceholderLoadMixin(object): loader_items = list(sorted(loader_items, key=lambda i: i["label"])) options = options or {} - # Get families from all loaders excluding "*" - families = set() + # Get product types from all loaders excluding "*" + product_types = set() for loader in loaders_by_name.values(): - families.update(loader.families) - families.discard("*") + product_types.update(loader.product_types) + product_types.discard("*") # Sort for readability - families = list(sorted(families)) + product_types = list(sorted(product_types)) builder_type_enum_items = [ {"label": "Current folder", "value": "context_folder"}, @@ -1308,7 +1331,7 @@ class PlaceholderLoadMixin(object): "product_type", label="Product type", default=product_type, - items=families + items=product_types ), attribute_definitions.TextDef( "representation", @@ -1323,7 +1346,7 @@ class PlaceholderLoadMixin(object): items=loader_items, tooltip=( "Loader" - "\nDefines what OpenPype loader will be used to" + "\nDefines what AYON loader will be used to" " load assets." "\nUseable loader depends on current host's loader list." "\nField is case sensitive." @@ -1408,50 +1431,6 @@ class PlaceholderLoadMixin(object): return {} - def _query_by_folder_regex(self, project_name, folder_regex): - """Query folders by folder path regex. - - WARNING: - This method will be removed once the same functionality is - available in ayon-python-api. - - Args: - project_name (str): Project name. - folder_regex (str): Regex for folder path. - - Returns: - list[str]: List of folder paths. - """ - - query = folders_graphql_query({"id"}) - - folders_field = None - for child in query._children: - if child.path != "project": - continue - - for project_child in child._children: - if project_child.path == "project/folders": - folders_field = project_child - break - if folders_field: - break - - if "folderPathRegex" not in query._variables: - folder_path_regex_var = query.add_variable( - "folderPathRegex", "String!" - ) - folders_field.set_filter("pathEx", folder_path_regex_var) - - query.set_variable_value("projectName", project_name) - if folder_regex: - query.set_variable_value("folderPathRegex", folder_regex) - - api = get_ayon_server_api_connection() - for parsed_data in query.continuous_query(api): - for folder in parsed_data["project"]["folders"]: - yield folder["id"] - def _get_representations(self, placeholder): """Prepared query of representations based on load options. @@ -1461,7 +1440,7 @@ class PlaceholderLoadMixin(object): Note: This returns all representation documents from all versions of matching product. To filter for last version use - '_reduce_last_version_repre_docs'. + '_reduce_last_version_repre_entities'. Args: placeholder (PlaceholderItem): Item which should be populated. 
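The `_query_by_folder_regex` helper removed above hand-built a GraphQL query only because a folder-path-regex filter was missing from ayon-python-api at the time; the next hunk replaces it with a plain `get_folders` call. Reduced to its essence (the helper name here is illustrative), the replacement amounts to:

```python
import ayon_api


def folder_ids_by_path_regex(project_name, folder_path_regex):
    # Server-side regex filter; only "id" is requested to keep the
    # response small, matching the call used in the hunk below.
    return {
        folder_entity["id"]
        for folder_entity in ayon_api.get_folders(
            project_name,
            folder_path_regex=folder_path_regex,
            fields={"id"},
        )
    }
```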
@@ -1480,24 +1459,31 @@ class PlaceholderLoadMixin(object): return [] project_name = self.builder.project_name - current_asset_doc = self.builder.current_asset_doc + current_folder_entity = self.builder.current_folder_entity folder_path_regex = placeholder.data["folder_path"] product_name_regex_value = placeholder.data["product_name"] product_name_regex = None if product_name_regex_value: product_name_regex = re.compile(product_name_regex_value) - product_type = placeholder.data["family"] + product_type = placeholder.data.get("product_type") + if product_type is None: + product_type = placeholder.data["family"] builder_type = placeholder.data["builder_type"] folder_ids = [] if builder_type == "context_folder": - folder_ids = [current_asset_doc["_id"]] + folder_ids = [current_folder_entity["id"]] elif builder_type == "all_folders": - folder_ids = list(self._query_by_folder_regex( - project_name, folder_path_regex - )) + folder_ids = { + folder_entity["id"] + for folder_entity in get_folders( + project_name, + folder_path_regex=folder_path_regex, + fields={"id"} + ) + } if not folder_ids: return [] @@ -1511,8 +1497,8 @@ class PlaceholderLoadMixin(object): filtered_product_ids = set() for product in products: if ( - product_name_regex is None - or product_name_regex.match(product["name"]) + product_name_regex is None + or product_name_regex.match(product["name"]) ): filtered_product_ids.add(product["id"]) @@ -1527,7 +1513,7 @@ class PlaceholderLoadMixin(object): ) return list(get_representations( project_name, - representation_names=[representation_name], + representation_names={representation_name}, version_ids=version_ids )) @@ -1543,32 +1529,22 @@ class PlaceholderLoadMixin(object): pass - def _reduce_last_version_repre_docs(self, representations): - """Reduce representations to last verison.""" + def _reduce_last_version_repre_entities(self, repre_contexts): + """Reduce representations to last version.""" - mapping = {} - for repre_doc in representations: - repre_context = repre_doc["context"] - - asset_name = repre_context["asset"] - product_name = repre_context["subset"] - version = repre_context.get("version", -1) - - if asset_name not in mapping: - mapping[asset_name] = {} - - product_mapping = mapping[asset_name] - if product_name not in product_mapping: - product_mapping[product_name] = collections.defaultdict(list) - - version_mapping = product_mapping[product_name] - version_mapping[version].append(repre_doc) + version_mapping_by_product_id = {} + for repre_context in repre_contexts: + product_id = repre_context["product"]["id"] + version = repre_context["version"]["version"] + version_mapping = version_mapping_by_product_id.setdefault( + product_id, {} + ) + version_mapping.setdefault(version, []).append(repre_context) output = [] - for product_mapping in mapping.values(): - for version_mapping in product_mapping.values(): - last_version = tuple(sorted(version_mapping.keys()))[-1] - output.extend(version_mapping[last_version]) + for version_mapping in version_mapping_by_product_id.values(): + last_version = max(version_mapping.keys()) + output.extend(version_mapping[last_version]) return output def populate_load_placeholder(self, placeholder, ignore_repre_ids=None): @@ -1596,42 +1572,44 @@ class PlaceholderLoadMixin(object): loader_name = placeholder.data["loader"] loader_args = self.parse_loader_args(placeholder.data["loader_args"]) - placeholder_representations = self._get_representations(placeholder) + placeholder_representations = [ + repre_entity + for repre_entity in 
self._get_representations(placeholder) + if repre_entity["id"] not in ignore_repre_ids + ] - filtered_representations = [] - for representation in self._reduce_last_version_repre_docs( - placeholder_representations - ): - repre_id = str(representation["_id"]) - if repre_id not in ignore_repre_ids: - filtered_representations.append(representation) - - if not filtered_representations: + repre_load_contexts = get_representation_contexts( + self.project_name, placeholder_representations + ) + filtered_repre_contexts = self._reduce_last_version_repre_entities( + repre_load_contexts.values() + ) + if not filtered_repre_contexts: self.log.info(( "There's no representation for this placeholder: {}" ).format(placeholder.scene_identifier)) + if not placeholder.data.get("keep_placeholder", True): + self.delete_placeholder(placeholder) return - repre_load_contexts = get_contexts_for_repre_docs( - self.project_name, filtered_representations - ) loaders_by_name = self.builder.get_loaders_by_name() self._before_placeholder_load( placeholder ) failed = False - for repre_load_context in repre_load_contexts.values(): + for repre_load_context in filtered_repre_contexts: + folder_path = repre_load_context["folder"]["path"] + product_name = repre_load_context["product"]["name"] representation = repre_load_context["representation"] - repre_context = representation["context"] self._before_repre_load( placeholder, representation ) self.log.info( "Loading {} from {} with loader {}\n" "Loader arguments used : {}".format( - repre_context["subset"], - repre_context["asset"], + product_name, + folder_path, loader_name, placeholder.data["loader_args"], ) @@ -1705,8 +1683,6 @@ class PlaceholderCreateMixin(object): publishable instances. Use it with 'get_placeholder_options'. Args: - plugin (PlaceholderPlugin): Plugin used for creating of - publish instances. options (Dict[str, Any]): Already available options which are used as defaults for attributes. @@ -1736,7 +1712,7 @@ class PlaceholderCreateMixin(object): items=creator_items, tooltip=( "Creator" - "\nDefines what OpenPype creator will be used to" + "\nDefines what AYON creator will be used to" " create publishable instance." "\nUseable creator depends on current host's creator list." "\nField is case sensitive." 
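For reference, the reworked `_reduce_last_version_repre_entities` in the earlier hunk boils down to a group-by on product id followed by `max()` over version numbers. A standalone sketch with simplified input, assuming plain dicts shaped like the contexts returned by `get_representation_contexts`:

```python
def reduce_to_last_versions(repre_contexts):
    # Group contexts by product id, then by version number.
    by_product = {}
    for repre_context in repre_contexts:
        product_id = repre_context["product"]["id"]
        version = repre_context["version"]["version"]
        versions = by_product.setdefault(product_id, {})
        versions.setdefault(version, []).append(repre_context)

    output = []
    for versions in by_product.values():
        # Version numbers compare as integers, so max() picks the latest.
        output.extend(versions[max(versions)])
    return output
```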
@@ -1788,31 +1764,24 @@
         # create product name
         context = self._builder.get_current_context()
         project_name = context["project_name"]
-        asset_name = context["folder_path"]
+        folder_path = context["folder_path"]
         task_name = context["task_name"]
+        host_name = self.builder.host_name
 
-        if legacy_create:
-            asset_doc = get_asset_by_name(
-                project_name, asset_name, fields=["_id"]
-            )
-            assert asset_doc, "No current asset found in Session"
-            product_name = creator_plugin.get_product_name(
-                project_name,
-                asset_doc["_id"],
-                task_name,
-                create_variant,
-            )
+        folder_entity = get_folder_by_path(project_name, folder_path)
+        if not folder_entity:
+            raise ValueError("Current context does not have a folder set")
+        task_entity = get_task_by_name(
+            project_name, folder_entity["id"], task_name
+        )
 
-        else:
-            asset_doc = get_asset_by_name(project_name, asset_name)
-            assert asset_doc, "No current asset found in Session"
-            product_name = creator_plugin.get_product_name(
-                project_name,
-                asset_doc,
-                task_name,
-                create_variant,
-                self.builder.host_name
-            )
+        product_name = creator_plugin.get_product_name(
+            project_name,
+            folder_entity,
+            task_entity,
+            create_variant,
+            host_name
+        )
 
         creator_data = {
             "creator_name": creator_name,
@@ -1828,13 +1797,13 @@
         if legacy_create:
             creator_instance = creator_plugin(
                 product_name,
-                asset_name
+                folder_path
             ).process()
         else:
             creator_instance = self.builder.create_context.create(
                 creator_plugin.identifier,
                 create_variant,
-                asset_doc,
+                folder_entity,
                 task_name=task_name,
                 pre_create_data=pre_create_data
            )
@@ -1882,7 +1851,7 @@
             self.log.debug("Clean up of placeholder is not implemented.")
 
     def _before_instance_create(self, placeholder):
-        """Can be overriden. Is called before instance is created."""
+        """Can be overridden.
Is called before instance is created.""" pass diff --git a/client/ayon_core/plugins/actions/open_file_explorer.py b/client/ayon_core/plugins/actions/open_file_explorer.py index fba3c231a5..50a3107444 100644 --- a/client/ayon_core/plugins/actions/open_file_explorer.py +++ b/client/ayon_core/plugins/actions/open_file_explorer.py @@ -1,12 +1,8 @@ import os import platform import subprocess - from string import Formatter -from ayon_core.client import ( - get_project, - get_asset_by_name, -) + from ayon_core.pipeline import ( Anatomy, LauncherAction, @@ -20,18 +16,14 @@ class OpenTaskPath(LauncherAction): icon = "folder-open" order = 500 - def is_compatible(self, session): + def is_compatible(self, selection): """Return whether the action is compatible with the session""" - return bool(session.get("AYON_FOLDER_PATH")) + return selection.is_folder_selected - def process(self, session, **kwargs): + def process(self, selection, **kwargs): from qtpy import QtCore, QtWidgets - project_name = session["AYON_PROJECT_NAME"] - asset_name = session["AYON_FOLDER_PATH"] - task_name = session.get("AYON_TASK_NAME", None) - - path = self._get_workdir(project_name, asset_name, task_name) + path = self._get_workdir(selection) if not path: return @@ -62,14 +54,20 @@ class OpenTaskPath(LauncherAction): path = path.split(field, 1)[0] return path - def _get_workdir(self, project_name, asset_name, task_name): - project = get_project(project_name) - asset = get_asset_by_name(project_name, asset_name) + def _get_workdir(self, selection): + data = get_template_data( + selection.project_entity, + selection.folder_entity, + selection.task_entity + ) - data = get_template_data(project, asset, task_name) - - anatomy = Anatomy(project_name) - workdir = anatomy.templates_obj["work"]["folder"].format(data) + anatomy = Anatomy( + selection.project_name, + project_entity=selection.project_entity + ) + workdir = anatomy.get_template_item( + "work", "default", "folder" + ).format(data) # Remove any potential un-formatted parts of the path valid_workdir = self._find_first_filled_path(workdir) @@ -84,7 +82,9 @@ class OpenTaskPath(LauncherAction): return valid_workdir data.pop("task", None) - workdir = anatomy.templates_obj["work"]["folder"].format(data) + workdir = anatomy.get_template_item( + "work", "default", "folder" + ).format(data) valid_workdir = self._find_first_filled_path(workdir) if valid_workdir: # Normalize diff --git a/client/ayon_core/plugins/inventory/remove_and_load.py b/client/ayon_core/plugins/inventory/remove_and_load.py index 5529090b42..6553f9a7b3 100644 --- a/client/ayon_core/plugins/inventory/remove_and_load.py +++ b/client/ayon_core/plugins/inventory/remove_and_load.py @@ -1,12 +1,12 @@ -from ayon_core.pipeline import InventoryAction -from ayon_core.pipeline import get_current_project_name +import ayon_api + +from ayon_core.pipeline import get_current_project_name, InventoryAction from ayon_core.pipeline.load.plugins import discover_loader_plugins from ayon_core.pipeline.load.utils import ( get_loader_identifier, remove_container, load_container, ) -from ayon_core.client import get_representation_by_id class RemoveAndLoad(InventoryAction): @@ -21,6 +21,7 @@ class RemoveAndLoad(InventoryAction): get_loader_identifier(plugin): plugin for plugin in discover_loader_plugins(project_name=project_name) } + repre_ids = set() for container in containers: # Get loader loader_name = container["loader"] @@ -30,16 +31,23 @@ class RemoveAndLoad(InventoryAction): "Failed to get loader '{}', can't remove " "and load 
container".format(loader_name) ) + repre_ids.add(container["representation"]) - # Get representation - representation = get_representation_by_id( - project_name, container["representation"] + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in ayon_api.get_representations( + project_name, representation_ids=repre_ids ) - if not representation: + } + for container in containers: + # Get representation + repre_id = container["representation"] + repre_entity = repre_entities_by_id.get(repre_id) + if not repre_entity: self.log.warning( "Skipping remove and load because representation id is not" " found in database: '{}'".format( - container["representation"] + repre_id ) ) continue @@ -48,4 +56,4 @@ class RemoveAndLoad(InventoryAction): remove_container(container) # Load container - load_container(loader, representation) + load_container(loader, repre_entity) diff --git a/client/ayon_core/plugins/load/copy_file.py b/client/ayon_core/plugins/load/copy_file.py index 0da22826f0..08dad03be3 100644 --- a/client/ayon_core/plugins/load/copy_file.py +++ b/client/ayon_core/plugins/load/copy_file.py @@ -5,8 +5,8 @@ from ayon_core.pipeline import load class CopyFile(load.LoaderPlugin): """Copy the published file to be pasted at the desired location""" - representations = ["*"] - families = ["*"] + representations = {"*"} + product_types = {"*"} label = "Copy File" order = 10 diff --git a/client/ayon_core/plugins/load/copy_file_path.py b/client/ayon_core/plugins/load/copy_file_path.py index c3478c32f3..fdf31b5e02 100644 --- a/client/ayon_core/plugins/load/copy_file_path.py +++ b/client/ayon_core/plugins/load/copy_file_path.py @@ -5,8 +5,8 @@ from ayon_core.pipeline import load class CopyFilePath(load.LoaderPlugin): """Copy published file path to clipboard""" - representations = ["*"] - families = ["*"] + representations = {"*"} + product_types = {"*"} label = "Copy File Path" order = 20 diff --git a/client/ayon_core/plugins/load/delete_old_versions.py b/client/ayon_core/plugins/load/delete_old_versions.py index 4fc61ebb8b..62302e7123 100644 --- a/client/ayon_core/plugins/load/delete_old_versions.py +++ b/client/ayon_core/plugins/load/delete_old_versions.py @@ -1,497 +1,426 @@ -# TODO This plugin is not converted for AYON +import collections +import os +import uuid -# import collections -# import os -# import uuid -# -# import clique -# from pymongo import UpdateOne -# import qargparse -# from qtpy import QtWidgets, QtCore -# -# from ayon_core import style -# from ayon_core.client import get_versions, get_representations -# from ayon_core.addon import AddonsManager -# from ayon_core.lib import format_file_size -# from ayon_core.pipeline import load, Anatomy -# from ayon_core.pipeline.load import ( -# get_representation_path_with_anatomy, -# InvalidRepresentationContext, -# ) -# -# -# class DeleteOldVersions(load.SubsetLoaderPlugin): -# """Deletes specific number of old version""" -# -# is_multiple_contexts_compatible = True -# sequence_splitter = "__sequence_splitter__" -# -# representations = ["*"] -# families = ["*"] -# tool_names = ["library_loader"] -# -# label = "Delete Old Versions" -# order = 35 -# icon = "trash" -# color = "#d8d8d8" -# -# options = [ -# qargparse.Integer( -# "versions_to_keep", default=2, min=0, help="Versions to keep:" -# ), -# qargparse.Boolean( -# "remove_publish_folder", help="Remove publish folder:" -# ) -# ] -# -# def delete_whole_dir_paths(self, dir_paths, delete=True): -# size = 0 -# -# for dir_path in dir_paths: -# # Delete all files and 
fodlers in dir path -# for root, dirs, files in os.walk(dir_path, topdown=False): -# for name in files: -# file_path = os.path.join(root, name) -# size += os.path.getsize(file_path) -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# -# for name in dirs: -# if delete: -# os.rmdir(os.path.join(root, name)) -# -# if not delete: -# continue -# -# # Delete even the folder and it's parents folders if they are empty -# while True: -# if not os.path.exists(dir_path): -# dir_path = os.path.dirname(dir_path) -# continue -# -# if len(os.listdir(dir_path)) != 0: -# break -# -# os.rmdir(os.path.join(dir_path)) -# -# return size -# -# def path_from_representation(self, representation, anatomy): -# try: -# context = representation["context"] -# except KeyError: -# return (None, None) -# -# try: -# path = get_representation_path_with_anatomy( -# representation, anatomy -# ) -# except InvalidRepresentationContext: -# return (None, None) -# -# sequence_path = None -# if "frame" in context: -# context["frame"] = self.sequence_splitter -# sequence_path = get_representation_path_with_anatomy( -# representation, anatomy -# ) -# -# if sequence_path: -# sequence_path = sequence_path.normalized() -# -# return (path.normalized(), sequence_path) -# -# def delete_only_repre_files(self, dir_paths, file_paths, delete=True): -# size = 0 -# -# for dir_id, dir_path in dir_paths.items(): -# dir_files = os.listdir(dir_path) -# collections, remainders = clique.assemble(dir_files) -# for file_path, seq_path in file_paths[dir_id]: -# file_path_base = os.path.split(file_path)[1] -# # Just remove file if `frame` key was not in context or -# # filled path is in remainders (single file sequence) -# if not seq_path or file_path_base in remainders: -# if not os.path.exists(file_path): -# self.log.debug( -# "File was not found: {}".format(file_path) -# ) -# continue -# -# size += os.path.getsize(file_path) -# -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# -# if file_path_base in remainders: -# remainders.remove(file_path_base) -# continue -# -# seq_path_base = os.path.split(seq_path)[1] -# head, tail = seq_path_base.split(self.sequence_splitter) -# -# final_col = None -# for collection in collections: -# if head != collection.head or tail != collection.tail: -# continue -# final_col = collection -# break -# -# if final_col is not None: -# # Fill full path to head -# final_col.head = os.path.join(dir_path, final_col.head) -# for _file_path in final_col: -# if os.path.exists(_file_path): -# -# size += os.path.getsize(_file_path) -# -# if delete: -# os.remove(_file_path) -# self.log.debug( -# "Removed file: {}".format(_file_path) -# ) -# -# _seq_path = final_col.format("{head}{padding}{tail}") -# self.log.debug("Removed files: {}".format(_seq_path)) -# collections.remove(final_col) -# -# elif os.path.exists(file_path): -# size += os.path.getsize(file_path) -# -# if delete: -# os.remove(file_path) -# self.log.debug("Removed file: {}".format(file_path)) -# else: -# self.log.debug( -# "File was not found: {}".format(file_path) -# ) -# -# # Delete as much as possible parent folders -# if not delete: -# return size -# -# for dir_path in dir_paths.values(): -# while True: -# if not os.path.exists(dir_path): -# dir_path = os.path.dirname(dir_path) -# continue -# -# if len(os.listdir(dir_path)) != 0: -# break -# -# self.log.debug("Removed folder: {}".format(dir_path)) -# os.rmdir(dir_path) -# -# return size -# -# def message(self, text): -# msgBox = 
QtWidgets.QMessageBox() -# msgBox.setText(text) -# msgBox.setStyleSheet(style.load_stylesheet()) -# msgBox.setWindowFlags( -# msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint -# ) -# msgBox.exec_() -# -# def get_data(self, context, versions_count): -# subset = context["subset"] -# asset = context["asset"] -# project_name = context["project"]["name"] -# anatomy = Anatomy(project_name) -# -# versions = list(get_versions(project_name, subset_ids=[subset["_id"]])) -# -# versions_by_parent = collections.defaultdict(list) -# for ent in versions: -# versions_by_parent[ent["parent"]].append(ent) -# -# def sort_func(ent): -# return int(ent["name"]) -# -# all_last_versions = [] -# for _parent_id, _versions in versions_by_parent.items(): -# for idx, version in enumerate( -# sorted(_versions, key=sort_func, reverse=True) -# ): -# if idx >= versions_count: -# break -# all_last_versions.append(version) -# -# self.log.debug("Collected versions ({})".format(len(versions))) -# -# # Filter latest versions -# for version in all_last_versions: -# versions.remove(version) -# -# # Update versions_by_parent without filtered versions -# versions_by_parent = collections.defaultdict(list) -# for ent in versions: -# versions_by_parent[ent["parent"]].append(ent) -# -# # Filter already deleted versions -# versions_to_pop = [] -# for version in versions: -# version_tags = version["data"].get("tags") -# if version_tags and "deleted" in version_tags: -# versions_to_pop.append(version) -# -# for version in versions_to_pop: -# msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format( -# asset["name"], subset["name"], version["name"] -# ) -# self.log.debug(( -# "Skipping version. Already tagged as `deleted`. < {} >" -# ).format(msg)) -# versions.remove(version) -# -# version_ids = [ent["_id"] for ent in versions] -# -# self.log.debug( -# "Filtered versions to delete ({})".format(len(version_ids)) -# ) -# -# if not version_ids: -# msg = "Skipping processing. Nothing to delete on {}/{}".format( -# asset["name"], subset["name"] -# ) -# self.log.info(msg) -# print(msg) -# return -# -# repres = list(get_representations( -# project_name, version_ids=version_ids -# )) -# -# self.log.debug( -# "Collected representations to remove ({})".format(len(repres)) -# ) -# -# dir_paths = {} -# file_paths_by_dir = collections.defaultdict(list) -# for repre in repres: -# file_path, seq_path = self.path_from_representation(repre, anatomy) -# if file_path is None: -# self.log.debug(( -# "Could not format path for represenation \"{}\"" -# ).format(str(repre))) -# continue -# -# dir_path = os.path.dirname(file_path) -# dir_id = None -# for _dir_id, _dir_path in dir_paths.items(): -# if _dir_path == dir_path: -# dir_id = _dir_id -# break -# -# if dir_id is None: -# dir_id = uuid.uuid4() -# dir_paths[dir_id] = dir_path -# -# file_paths_by_dir[dir_id].append([file_path, seq_path]) -# -# dir_ids_to_pop = [] -# for dir_id, dir_path in dir_paths.items(): -# if os.path.exists(dir_path): -# continue -# -# dir_ids_to_pop.append(dir_id) -# -# # Pop dirs from both dictionaries -# for dir_id in dir_ids_to_pop: -# dir_paths.pop(dir_id) -# paths = file_paths_by_dir.pop(dir_id) -# # TODO report of missing directories? -# paths_msg = ", ".join([ -# "'{}'".format(path[0].replace("\\", "/")) for path in paths -# ]) -# self.log.debug(( -# "Folder does not exist. 
Deleting it's files skipped: {}" -# ).format(paths_msg)) -# -# data = { -# "dir_paths": dir_paths, -# "file_paths_by_dir": file_paths_by_dir, -# "versions": versions, -# "asset": asset, -# "subset": subset, -# "archive_subset": versions_count == 0 -# } -# -# return data -# -# def main(self, project_name, data, remove_publish_folder): -# # Size of files. -# size = 0 -# if not data: -# return size -# -# if remove_publish_folder: -# size = self.delete_whole_dir_paths(data["dir_paths"].values()) -# else: -# size = self.delete_only_repre_files( -# data["dir_paths"], data["file_paths_by_dir"] -# ) -# -# mongo_changes_bulk = [] -# for version in data["versions"]: -# orig_version_tags = version["data"].get("tags") or [] -# version_tags = [tag for tag in orig_version_tags] -# if "deleted" not in version_tags: -# version_tags.append("deleted") -# -# if version_tags == orig_version_tags: -# continue -# -# update_query = {"_id": version["_id"]} -# update_data = {"$set": {"data.tags": version_tags}} -# mongo_changes_bulk.append(UpdateOne(update_query, update_data)) -# -# if data["archive_subset"]: -# mongo_changes_bulk.append(UpdateOne( -# { -# "_id": data["subset"]["_id"], -# "type": "subset" -# }, -# {"$set": {"type": "archived_subset"}} -# )) -# -# if mongo_changes_bulk: -# dbcon = AvalonMongoDB() -# dbcon.Session["AYON_PROJECT_NAME"] = project_name -# dbcon.install() -# dbcon.bulk_write(mongo_changes_bulk) -# dbcon.uninstall() -# -# self._ftrack_delete_versions(data) -# -# return size -# -# def _ftrack_delete_versions(self, data): -# """Delete version on ftrack. -# -# Handling of ftrack logic in this plugin is not ideal. But in OP3 it is -# almost impossible to solve the issue other way. -# -# Note: -# Asset versions on ftrack are not deleted but marked as -# "not published" which cause that they're invisible. -# -# Args: -# data (dict): Data sent to subset loader with full context. -# """ -# -# # First check for ftrack id on asset document -# # - skip if ther is none -# asset_ftrack_id = data["asset"]["data"].get("ftrackId") -# if not asset_ftrack_id: -# self.log.info(( -# "Asset does not have filled ftrack id. Skipped delete" -# " of ftrack version." -# )) -# return -# -# # Check if ftrack module is enabled -# addons_manager = AddonsManager() -# ftrack_addon = addons_manager.get("ftrack") -# if not ftrack_addon or not ftrack_addon.enabled: -# return -# -# import ftrack_api -# -# session = ftrack_api.Session() -# product_name = data["subset"]["name"] -# versions = { -# '"{}"'.format(version_doc["name"]) -# for version_doc in data["versions"] -# } -# asset_versions = session.query( -# ( -# "select id, is_published from AssetVersion where" -# " asset.parent.id is \"{}\"" -# " and asset.name is \"{}\"" -# " and version in ({})" -# ).format( -# asset_ftrack_id, -# product_name, -# ",".join(versions) -# ) -# ).all() -# -# # Set attribute `is_published` to `False` on ftrack AssetVersions -# for asset_version in asset_versions: -# asset_version["is_published"] = False -# -# try: -# session.commit() -# -# except Exception: -# msg = ( -# "Could not set `is_published` attribute to `False`" -# " for selected AssetVersions." 
-#             )
-#             self.log.error(msg)
-#             self.message(msg)
-#
-#     def load(self, contexts, name=None, namespace=None, options=None):
-#         try:
-#             size = 0
-#             for count, context in enumerate(contexts):
-#                 versions_to_keep = 2
-#                 remove_publish_folder = False
-#                 if options:
-#                     versions_to_keep = options.get(
-#                         "versions_to_keep", versions_to_keep
-#                     )
-#                     remove_publish_folder = options.get(
-#                         "remove_publish_folder", remove_publish_folder
-#                     )
-#
-#                 data = self.get_data(context, versions_to_keep)
-#                 if not data:
-#                     continue
-#
-#                 project_name = context["project"]["name"]
-#                 size += self.main(project_name, data, remove_publish_folder)
-#                 print("Progressing {}/{}".format(count + 1, len(contexts)))
-#
-#             msg = "Total size of files: {}".format(format_file_size(size))
-#             self.log.info(msg)
-#             self.message(msg)
-#
-#         except Exception:
-#             self.log.error("Failed to delete versions.", exc_info=True)
-#
-#
-# class CalculateOldVersions(DeleteOldVersions):
-#     """Calculate file size of old versions"""
-#     label = "Calculate Old Versions"
-#     order = 30
-#     tool_names = ["library_loader"]
-#
-#     options = [
-#         qargparse.Integer(
-#             "versions_to_keep", default=2, min=0, help="Versions to keep:"
-#         ),
-#         qargparse.Boolean(
-#             "remove_publish_folder", help="Remove publish folder:"
-#         )
-#     ]
-#
-#     def main(self, project_name, data, remove_publish_folder):
-#         size = 0
-#
-#         if not data:
-#             return size
-#
-#         if remove_publish_folder:
-#             size = self.delete_whole_dir_paths(
-#                 data["dir_paths"].values(), delete=False
-#             )
-#         else:
-#             size = self.delete_only_repre_files(
-#                 data["dir_paths"], data["file_paths_by_dir"], delete=False
-#             )
-#
-#         return size
+import clique
+import ayon_api
+from ayon_api.operations import OperationsSession
+import qargparse
+from qtpy import QtWidgets, QtCore
+
+from ayon_core import style
+from ayon_core.lib import format_file_size
+from ayon_core.pipeline import load, Anatomy
+from ayon_core.pipeline.load import (
+    get_representation_path_with_anatomy,
+    InvalidRepresentationContext,
+)
+
+
+class DeleteOldVersions(load.ProductLoaderPlugin):
+    """Deletes specific number of old versions"""
+
+    is_multiple_contexts_compatible = True
+    sequence_splitter = "__sequence_splitter__"
+
+    representations = {"*"}
+    product_types = {"*"}
+    tool_names = ["library_loader"]
+
+    label = "Delete Old Versions"
+    order = 35
+    icon = "trash"
+    color = "#d8d8d8"
+
+    options = [
+        qargparse.Integer(
+            "versions_to_keep", default=2, min=0, help="Versions to keep:"
+        ),
+        qargparse.Boolean(
+            "remove_publish_folder", help="Remove publish folder:"
+        )
+    ]
+
+    def delete_whole_dir_paths(self, dir_paths, delete=True):
+        size = 0
+
+        for dir_path in dir_paths:
+            # Delete all files and folders in dir path
+            for root, dirs, files in os.walk(dir_path, topdown=False):
+                for name in files:
+                    file_path = os.path.join(root, name)
+                    size += os.path.getsize(file_path)
+                    if delete:
+                        os.remove(file_path)
+                        self.log.debug("Removed file: {}".format(file_path))
+
+                for name in dirs:
+                    if delete:
+                        os.rmdir(os.path.join(root, name))
+
+            if not delete:
+                continue
+
+            # Delete even the folder and its parent folders if they are empty
+            while True:
+                if not os.path.exists(dir_path):
+                    dir_path = os.path.dirname(dir_path)
+                    continue
+
+                if len(os.listdir(dir_path)) != 0:
+                    break
+
+                os.rmdir(os.path.join(dir_path))
+
+        return size
+
+    def path_from_representation(self, representation, anatomy):
+        try:
+            context = representation["context"]
+        except KeyError:
+            return (None, None)
+
+        try:
+            path = get_representation_path_with_anatomy(
+                representation, anatomy
+            )
+        except InvalidRepresentationContext:
+            return (None, None)
+
+        sequence_path = None
+        if "frame" in context:
+            context["frame"] = self.sequence_splitter
+            sequence_path = get_representation_path_with_anatomy(
+                representation, anatomy
+            )
+
+        if sequence_path:
+            sequence_path = sequence_path.normalized()
+
+        return (path.normalized(), sequence_path)
+
+    def delete_only_repre_files(self, dir_paths, file_paths, delete=True):
+        size = 0
+
+        for dir_id, dir_path in dir_paths.items():
+            dir_files = os.listdir(dir_path)
+            collections, remainders = clique.assemble(dir_files)
+            for file_path, seq_path in file_paths[dir_id]:
+                file_path_base = os.path.split(file_path)[1]
+                # Just remove file if `frame` key was not in context or
+                # filled path is in remainders (single file sequence)
+                if not seq_path or file_path_base in remainders:
+                    if not os.path.exists(file_path):
+                        self.log.debug(
+                            "File was not found: {}".format(file_path)
+                        )
+                        continue
+
+                    size += os.path.getsize(file_path)
+
+                    if delete:
+                        os.remove(file_path)
+                        self.log.debug("Removed file: {}".format(file_path))
+
+                    if file_path_base in remainders:
+                        remainders.remove(file_path_base)
+                    continue
+
+                seq_path_base = os.path.split(seq_path)[1]
+                head, tail = seq_path_base.split(self.sequence_splitter)
+
+                final_col = None
+                for collection in collections:
+                    if head != collection.head or tail != collection.tail:
+                        continue
+                    final_col = collection
+                    break
+
+                if final_col is not None:
+                    # Fill full path to head
+                    final_col.head = os.path.join(dir_path, final_col.head)
+                    for _file_path in final_col:
+                        if os.path.exists(_file_path):
+
+                            size += os.path.getsize(_file_path)
+
+                            if delete:
+                                os.remove(_file_path)
+                                self.log.debug(
+                                    "Removed file: {}".format(_file_path)
+                                )
+
+                    _seq_path = final_col.format("{head}{padding}{tail}")
+                    self.log.debug("Removed files: {}".format(_seq_path))
+                    collections.remove(final_col)
+
+                elif os.path.exists(file_path):
+                    size += os.path.getsize(file_path)
+
+                    if delete:
+                        os.remove(file_path)
+                        self.log.debug("Removed file: {}".format(file_path))
+                else:
+                    self.log.debug(
+                        "File was not found: {}".format(file_path)
+                    )
+
+        # Delete as many parent folders as possible
+        if not delete:
+            return size
+
+        for dir_path in dir_paths.values():
+            while True:
+                if not os.path.exists(dir_path):
+                    dir_path = os.path.dirname(dir_path)
+                    continue
+
+                if len(os.listdir(dir_path)) != 0:
+                    break
+
+                self.log.debug("Removed folder: {}".format(dir_path))
+                os.rmdir(dir_path)
+
+        return size
+
+    def message(self, text):
+        msgBox = QtWidgets.QMessageBox()
+        msgBox.setText(text)
+        msgBox.setStyleSheet(style.load_stylesheet())
+        msgBox.setWindowFlags(
+            msgBox.windowFlags() | QtCore.Qt.FramelessWindowHint
+        )
+        msgBox.exec_()
+
+    def get_data(self, context, versions_count):
+        product_entity = context["product"]
+        folder_entity = context["folder"]
+        project_name = context["project"]["name"]
+        anatomy = Anatomy(project_name, project_entity=context["project"])
+
+        version_fields = ayon_api.get_default_fields_for_type("version")
+        version_fields.add("tags")
+        versions = list(ayon_api.get_versions(
+            project_name,
+            product_ids=[product_entity["id"]],
+            active=None,
+            hero=False,
+            fields=version_fields
+        ))
+        self.log.debug(
+            "Queried versions ({})".format(len(versions))
+        )
+        versions_by_parent = collections.defaultdict(list)
+        for ent in versions:
+            versions_by_parent[ent["productId"]].append(ent)
+
+        def sort_func(ent):
+            return int(ent["version"])
+
+        all_last_versions = []
+        for _parent_id, _versions in versions_by_parent.items():
+            for idx, version in enumerate(
+                sorted(_versions, key=sort_func, reverse=True)
+            ):
+                if idx >= versions_count:
+                    break
+                all_last_versions.append(version)
+
+        self.log.debug("Collected versions ({})".format(len(versions)))
+
+        # Filter latest versions
+        for version in all_last_versions:
+            versions.remove(version)
+
+        # Update versions_by_parent without filtered versions
+        versions_by_parent = collections.defaultdict(list)
+        for ent in versions:
+            versions_by_parent[ent["productId"]].append(ent)
+
+        # Filter already deleted versions
+        versions_to_pop = []
+        for version in versions:
+            if "deleted" in version["tags"]:
+                versions_to_pop.append(version)
+
+        for version in versions_to_pop:
+            msg = "Folder: \"{}\" | Product: \"{}\" | Version: \"{}\"".format(
+                folder_entity["path"],
+                product_entity["name"],
+                version["version"]
+            )
+            self.log.debug((
+                "Skipping version. Already tagged as 'deleted'. < {} >"
+            ).format(msg))
+            versions.remove(version)
+
+        version_ids = [ent["id"] for ent in versions]
+
+        self.log.debug(
+            "Filtered versions to delete ({})".format(len(version_ids))
+        )
+
+        if not version_ids:
+            msg = "Skipping processing. Nothing to delete on {}/{}".format(
+                folder_entity["path"], product_entity["name"]
+            )
+            self.log.info(msg)
+            print(msg)
+            return
+
+        repres = list(ayon_api.get_representations(
+            project_name, version_ids=version_ids
+        ))
+
+        self.log.debug(
+            "Collected representations to remove ({})".format(len(repres))
+        )
+
+        dir_paths = {}
+        file_paths_by_dir = collections.defaultdict(list)
+        for repre in repres:
+            file_path, seq_path = self.path_from_representation(
+                repre, anatomy
+            )
+            if file_path is None:
+                self.log.debug((
+                    "Could not format path for representation \"{}\""
+                ).format(str(repre)))
+                continue
+
+            dir_path = os.path.dirname(file_path)
+            dir_id = None
+            for _dir_id, _dir_path in dir_paths.items():
+                if _dir_path == dir_path:
+                    dir_id = _dir_id
+                    break
+
+            if dir_id is None:
+                dir_id = uuid.uuid4()
+                dir_paths[dir_id] = dir_path
+
+            file_paths_by_dir[dir_id].append([file_path, seq_path])
+
+        dir_ids_to_pop = []
+        for dir_id, dir_path in dir_paths.items():
+            if os.path.exists(dir_path):
+                continue
+
+            dir_ids_to_pop.append(dir_id)
+
+        # Pop dirs from both dictionaries
+        for dir_id in dir_ids_to_pop:
+            dir_paths.pop(dir_id)
+            paths = file_paths_by_dir.pop(dir_id)
+            # TODO report of missing directories?
+            paths_msg = ", ".join([
+                "'{}'".format(path[0].replace("\\", "/")) for path in paths
+            ])
+            self.log.debug((
+                "Folder does not exist. Deleting its files skipped: {}"
+            ).format(paths_msg))
+
+        return {
+            "dir_paths": dir_paths,
+            "file_paths_by_dir": file_paths_by_dir,
+            "versions": versions,
+            "folder": folder_entity,
+            "product": product_entity,
+            "archive_product": versions_count == 0
+        }
+
+    def main(self, project_name, data, remove_publish_folder):
+        # Size of files.
+ size = 0 + if not data: + return size + + if remove_publish_folder: + size = self.delete_whole_dir_paths(data["dir_paths"].values()) + else: + size = self.delete_only_repre_files( + data["dir_paths"], data["file_paths_by_dir"] + ) + + op_session = OperationsSession() + for version in data["versions"]: + orig_version_tags = version["tags"] + version_tags = list(orig_version_tags) + changes = {} + if "deleted" not in version_tags: + version_tags.append("deleted") + changes["tags"] = version_tags + + if version["active"]: + changes["active"] = False + + if not changes: + continue + op_session.update_entity( + project_name, "version", version["id"], changes + ) + + op_session.commit() + + return size + + def load(self, contexts, name=None, namespace=None, options=None): + try: + size = 0 + for count, context in enumerate(contexts): + versions_to_keep = 2 + remove_publish_folder = False + if options: + versions_to_keep = options.get( + "versions_to_keep", versions_to_keep + ) + remove_publish_folder = options.get( + "remove_publish_folder", remove_publish_folder + ) + + data = self.get_data(context, versions_to_keep) + if not data: + continue + project_name = context["project"]["name"] + size += self.main(project_name, data, remove_publish_folder) + print("Progressing {}/{}".format(count + 1, len(contexts))) + + msg = "Total size of files: {}".format(format_file_size(size)) + self.log.info(msg) + self.message(msg) + + except Exception: + self.log.error("Failed to delete versions.", exc_info=True) + + +class CalculateOldVersions(DeleteOldVersions): + """Calculate file size of old versions""" + label = "Calculate Old Versions" + order = 30 + tool_names = ["library_loader"] + + options = [ + qargparse.Integer( + "versions_to_keep", default=2, min=0, help="Versions to keep:" + ), + qargparse.Boolean( + "remove_publish_folder", help="Remove publish folder:" + ) + ] + + def main(self, project_name, data, remove_publish_folder): + size = 0 + + if not data: + return size + + if remove_publish_folder: + size = self.delete_whole_dir_paths( + data["dir_paths"].values(), delete=False + ) + else: + size = self.delete_only_repre_files( + data["dir_paths"], data["file_paths_by_dir"], delete=False + ) + + return size diff --git a/client/ayon_core/plugins/load/delivery.py b/client/ayon_core/plugins/load/delivery.py index 16f315937b..c7954a18b2 100644 --- a/client/ayon_core/plugins/load/delivery.py +++ b/client/ayon_core/plugins/load/delivery.py @@ -2,9 +2,9 @@ import copy import platform from collections import defaultdict +import ayon_api from qtpy import QtWidgets, QtCore, QtGui -from ayon_core.client import get_representations from ayon_core.pipeline import load, Anatomy from ayon_core import resources, style @@ -22,14 +22,14 @@ from ayon_core.pipeline.delivery import ( ) -class Delivery(load.SubsetLoaderPlugin): +class Delivery(load.ProductLoaderPlugin): """Export selected versions to folder structure from Template""" is_multiple_contexts_compatible = True sequence_splitter = "__sequence_splitter__" - representations = ["*"] - families = ["*"] + representations = {"*"} + product_types = {"*"} tool_names = ["library_loader"] label = "Deliver Versions" @@ -91,9 +91,15 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): longest_key = max(self.templates.keys(), key=len) dropdown.setMinimumContentsLength(len(longest_key)) - template_label = QtWidgets.QLabel() - template_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) - template_label.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse) + 
template_dir_label = QtWidgets.QLabel() + template_dir_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + template_dir_label.setTextInteractionFlags( + QtCore.Qt.TextSelectableByMouse) + + template_file_label = QtWidgets.QLabel() + template_file_label.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + template_file_label.setTextInteractionFlags( + QtCore.Qt.TextSelectableByMouse) renumber_frame = QtWidgets.QCheckBox() @@ -123,7 +129,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): input_layout.addRow("Selected representations", selected_label) input_layout.addRow("Delivery template", dropdown) - input_layout.addRow("Template value", template_label) + input_layout.addRow("Directory template", template_dir_label) + input_layout.addRow("File template", template_file_label) input_layout.addRow("Renumber Frame", renumber_frame) input_layout.addRow("Renumber start frame", first_frame_start) input_layout.addRow("Root", root_line_edit) @@ -151,7 +158,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): layout.addWidget(text_area) self.selected_label = selected_label - self.template_label = template_label + self.template_dir_label = template_dir_label + self.template_file_label = template_file_label self.dropdown = dropdown self.first_frame_start = first_frame_start self.renumber_frame = renumber_frame @@ -202,7 +210,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): ) anatomy_data = copy.deepcopy(repre["context"]) - new_report_items = check_destination_path(str(repre["_id"]), + new_report_items = check_destination_path(repre["id"], self.anatomy, anatomy_data, datetime_data, @@ -260,7 +268,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): report_items.update(new_report_items) self._update_progress(uploaded) else: # fallback for Pype2 and representations without files - frame = repre['context'].get('frame') + frame = repre["context"].get("frame") if frame: repre["context"]["frame"] = len(str(frame)) * "#" @@ -282,7 +290,13 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): """Adds list of delivery templates from Anatomy to dropdown.""" templates = {} for template_name, value in anatomy.templates["delivery"].items(): - if not isinstance(value, str) or not value.startswith('{root'): + directory_template = value["directory"] + if not directory_template.startswith("{root"): + self.log.warning( + "Skipping template '%s' because directory template does " + "not start with `{root` in value: %s", + template_name, directory_template + ) continue templates[template_name] = value @@ -290,9 +304,9 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): return templates def _set_representations(self, project_name, contexts): - version_ids = [context["version"]["_id"] for context in contexts] + version_ids = {context["version"]["id"] for context in contexts} - repres = list(get_representations( + repres = list(ayon_api.get_representations( project_name, version_ids=version_ids )) @@ -346,7 +360,8 @@ class DeliveryOptionsDialog(QtWidgets.QDialog): name = self.dropdown.currentText() template_value = self.templates.get(name) if template_value: - self.template_label.setText(template_value) + self.template_dir_label.setText(template_value["directory"]) + self.template_file_label.setText(template_value["file"]) self.btn_delivery.setEnabled(bool(self._get_selected_repres())) def _update_progress(self, uploaded): diff --git a/client/ayon_core/plugins/load/open_djv.py b/client/ayon_core/plugins/load/open_djv.py deleted file mode 100644 index 70352c2435..0000000000 --- a/client/ayon_core/plugins/load/open_djv.py 
+++ /dev/null @@ -1,64 +0,0 @@ -import os -from ayon_core.lib import ApplicationManager -from ayon_core.pipeline import load - - -def existing_djv_path(): - app_manager = ApplicationManager() - djv_list = [] - - for app_name, app in app_manager.applications.items(): - if 'djv' in app_name and app.find_executable(): - djv_list.append(app_name) - - return djv_list - - -class OpenInDJV(load.LoaderPlugin): - """Open Image Sequence with system default""" - - djv_list = existing_djv_path() - families = ["*"] if djv_list else [] - representations = ["*"] - extensions = { - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264", - } - - label = "Open in DJV" - order = -10 - icon = "play-circle" - color = "orange" - - def load(self, context, name, namespace, data): - import clique - - path = self.filepath_from_context(context) - directory = os.path.dirname(path) - - pattern = clique.PATTERNS["frames"] - files = os.listdir(directory) - collections, remainder = clique.assemble( - files, - patterns=[pattern], - minimum_items=1 - ) - - if not remainder: - sequence = collections[0] - first_image = list(sequence)[0] - else: - first_image = path - filepath = os.path.normpath(os.path.join(directory, first_image)) - - self.log.info("Opening : {}".format(filepath)) - - last_djv_version = sorted(self.djv_list)[-1] - - app_manager = ApplicationManager() - djv = app_manager.applications.get(last_djv_version) - djv.arguments.append(filepath) - - app_manager.launch(last_djv_version) diff --git a/client/ayon_core/plugins/load/open_file.py b/client/ayon_core/plugins/load/open_file.py index 5ae5959102..3b5fbbc0c9 100644 --- a/client/ayon_core/plugins/load/open_file.py +++ b/client/ayon_core/plugins/load/open_file.py @@ -18,8 +18,8 @@ def open(filepath): class OpenFile(load.LoaderPlugin): """Open Image Sequence or Video with system default""" - families = ["render2d"] - representations = ["*"] + product_types = {"render2d"} + representations = {"*"} label = "Open" order = -10 diff --git a/client/ayon_core/plugins/load/push_to_library.py b/client/ayon_core/plugins/load/push_to_library.py index 39f95d134c..981028d734 100644 --- a/client/ayon_core/plugins/load/push_to_library.py +++ b/client/ayon_core/plugins/load/push_to_library.py @@ -6,13 +6,13 @@ from ayon_core.pipeline import load from ayon_core.pipeline.load import LoadError -class PushToLibraryProject(load.SubsetLoaderPlugin): +class PushToLibraryProject(load.ProductLoaderPlugin): """Export selected versions to folder structure from Template""" is_multiple_contexts_compatible = True - representations = ["*"] - families = ["*"] + representations = {"*"} + product_types = {"*"} label = "Push to Library project" order = 35 @@ -40,10 +40,8 @@ class PushToLibraryProject(load.SubsetLoaderPlugin): "main.py" ) - project_doc = context["project"] - version_doc = context["version"] - project_name = project_doc["name"] - version_id = str(version_doc["_id"]) + project_name = context["project"]["name"] + version_id = context["version"]["id"] args = get_ayon_launcher_args( "run", diff --git a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py index b5bb579498..cccf392e40 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_context_data.py +++ 
b/client/ayon_core/plugins/publish/collect_anatomy_context_data.py @@ -3,7 +3,8 @@ Requires: context -> anatomy context -> projectEntity - context -> assetEntity + context -> folderEntity + context -> taskEntity context -> task context -> username context -> datetimeData @@ -49,15 +50,15 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): host_name = context.data["hostName"] project_settings = context.data["project_settings"] project_entity = context.data["projectEntity"] - asset_entity = context.data.get("assetEntity") - task_name = None - if asset_entity: - task_name = context.data["task"] + folder_entity = context.data.get("folderEntity") + task_entity = None + if folder_entity: + task_entity = context.data["taskEntity"] anatomy_data = get_template_data( project_entity, - asset_entity, - task_name, + folder_entity, + task_entity, host_name, project_settings ) diff --git a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py index b62935dd6a..f8cc81e718 100644 --- a/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py +++ b/client/ayon_core/plugins/publish/collect_anatomy_instance_data.py @@ -1,13 +1,17 @@ """ Requires: - context -> anatomyData + context -> projectName context -> projectEntity - context -> assetEntity + context -> anatomyData instance -> folderPath instance -> productName instance -> productType Optional: + context -> folderEntity + context -> taskEntity + instance -> task + instance -> taskEntity instance -> version instance -> resolutionWidth instance -> resolutionHeight @@ -15,7 +19,8 @@ Optional: Provides: instance -> projectEntity - instance -> assetEntity + instance -> folderEntity + instance -> taskEntity instance -> anatomyData instance -> version instance -> latestVersion @@ -26,13 +31,8 @@ import json import collections import pyblish.api +import ayon_api -from ayon_core.client import ( - get_assets, - get_subsets, - get_last_versions, - get_asset_name_identifier, -) from ayon_core.pipeline.version_start import get_versioning_start @@ -51,73 +51,174 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): self.log.debug("Collecting anatomy data for all instances.") project_name = context.data["projectName"] - self.fill_missing_asset_docs(context, project_name) + self.fill_missing_folder_entities(context, project_name) + self.fill_missing_task_entities(context, project_name) self.fill_latest_versions(context, project_name) self.fill_anatomy_data(context) self.log.debug("Anatomy Data collection finished.") - def fill_missing_asset_docs(self, context, project_name): - self.log.debug("Querying asset documents for instances.") + def fill_missing_folder_entities(self, context, project_name): + self.log.debug("Querying folder entities for instances.") - context_asset_doc = context.data.get("assetEntity") - context_asset_name = None - if context_asset_doc: - context_asset_name = get_asset_name_identifier(context_asset_doc) + context_folder_entity = context.data.get("folderEntity") + context_folder_path = None + if context_folder_entity: + context_folder_path = context_folder_entity["path"] - instances_with_missing_asset_doc = collections.defaultdict(list) + instances_missing_folder = collections.defaultdict(list) for instance in context: - instance_asset_doc = instance.data.get("assetEntity") - _asset_name = instance.data["folderPath"] + instance_folder_entity = instance.data.get("folderEntity") + _folder_path = instance.data["folderPath"] - # There is 
possibility that assetEntity on instance is already set - # which can happen in standalone publisher - if instance_asset_doc: - instance_asset_name = get_asset_name_identifier( - instance_asset_doc) - if instance_asset_name == _asset_name: + # There is possibility that folderEntity on instance is set + if instance_folder_entity: + instance_folder_path = instance_folder_entity["path"] + if instance_folder_path == _folder_path: continue - # Check if asset name is the same as what is in context - # - they may be different, e.g. in NukeStudio - if context_asset_name and context_asset_name == _asset_name: - instance.data["assetEntity"] = context_asset_doc + # Check if folder path is the same as what is in context + # - they may be different, e.g. during editorial publishing + if context_folder_path and context_folder_path == _folder_path: + instance.data["folderEntity"] = context_folder_entity else: - instances_with_missing_asset_doc[_asset_name].append(instance) + instances_missing_folder[_folder_path].append( + instance + ) - if not instances_with_missing_asset_doc: - self.log.debug("All instances already had right asset document.") + if not instances_missing_folder: + self.log.debug("All instances already had right folder entity.") return - asset_names = list(instances_with_missing_asset_doc.keys()) - self.log.debug("Querying asset documents with names: {}".format( - ", ".join(["\"{}\"".format(name) for name in asset_names]) + folder_paths = list(instances_missing_folder.keys()) + self.log.debug("Querying folder entities with paths: {}".format( + ", ".join(["\"{}\"".format(path) for path in folder_paths]) )) - asset_docs = get_assets(project_name, asset_names=asset_names) - asset_docs_by_name = { - get_asset_name_identifier(asset_doc): asset_doc - for asset_doc in asset_docs + folder_entities_by_path = { + folder_entity["path"]: folder_entity + for folder_entity in ayon_api.get_folders( + project_name, folder_paths=folder_paths + ) } - not_found_asset_names = [] - for asset_name, instances in instances_with_missing_asset_doc.items(): - asset_doc = asset_docs_by_name.get(asset_name) - if not asset_doc: - not_found_asset_names.append(asset_name) + not_found_folder_paths = [] + for folder_path, instances in instances_missing_folder.items(): + folder_entity = folder_entities_by_path.get(folder_path) + if not folder_entity: + not_found_folder_paths.append(folder_path) continue for _instance in instances: - _instance.data["assetEntity"] = asset_doc + _instance.data["folderEntity"] = folder_entity - if not_found_asset_names: - joined_asset_names = ", ".join( - ["\"{}\"".format(name) for name in not_found_asset_names] + if not_found_folder_paths: + joined_folder_paths = ", ".join( + ["\"{}\"".format(path) for path in not_found_folder_paths] ) self.log.warning(( - "Not found asset documents with names \"{}\"." - ).format(joined_asset_names)) + "Not found folder entities with paths \"{}\"." 
+ ).format(joined_folder_paths)) + + def fill_missing_task_entities(self, context, project_name): + self.log.debug("Querying task entities for instances.") + + context_folder_entity = context.data.get("folderEntity") + context_folder_id = None + if context_folder_entity: + context_folder_id = context_folder_entity["id"] + context_task_entity = context.data.get("taskEntity") + context_task_name = None + if context_task_entity: + context_task_name = context_task_entity["name"] + + instances_missing_task = {} + folder_path_by_id = {} + for instance in context: + folder_entity = instance.data.get("folderEntity") + # Skip if instance does not have filled folder entity + if not folder_entity: + continue + folder_id = folder_entity["id"] + folder_path_by_id[folder_id] = folder_entity["path"] + + task_entity = instance.data.get("taskEntity") + _task_name = instance.data.get("task") + + # There is a possibility that taskEntity on instance is set + if task_entity: + task_parent_id = task_entity["folderId"] + instance_task_name = task_entity["name"] + if ( + folder_id == task_parent_id + and instance_task_name == _task_name + ): + continue + + # Check if folder path is the same as what is in context + # - they may be different, e.g. in NukeStudio + if ( + context_folder_id == folder_id + and context_task_name == _task_name + ): + instance.data["taskEntity"] = context_task_entity + continue + + _by_folder_id = instances_missing_task.setdefault(folder_id, {}) + _by_task_name = _by_folder_id.setdefault(_task_name, []) + _by_task_name.append(instance) + + if not instances_missing_task: + self.log.debug("All instances already had right task entity.") + return + + self.log.debug("Querying task entities") + + all_folder_ids = set(instances_missing_task.keys()) + all_task_names = set() + for per_task in instances_missing_task.values(): + all_task_names |= set(per_task.keys()) + all_task_names.discard(None) + + task_entities = [] + if all_task_names: + task_entities = ayon_api.get_tasks( + project_name, + folder_ids=all_folder_ids, + task_names=all_task_names + ) + task_entity_by_ids = {} + for task_entity in task_entities: + folder_id = task_entity["folderId"] + task_name = task_entity["name"] + _by_folder_id = task_entity_by_ids.setdefault(folder_id, {}) + _by_folder_id[task_name] = task_entity + + not_found_task_paths = [] + for folder_id, by_task in instances_missing_task.items(): + for task_name, instances in by_task.items(): + task_entity = ( + task_entity_by_ids + .get(folder_id, {}) + .get(task_name) + ) + if task_name and not task_entity: + folder_path = folder_path_by_id[folder_id] + not_found_task_paths.append( + "/".join([folder_path, task_name]) + ) + + for instance in instances: + instance.data["taskEntity"] = task_entity + + if not_found_task_paths: + joined_paths = ", ".join( + ["\"{}\"".format(path) for path in not_found_task_paths] + ) + self.log.warning(( + "Task entities with paths {} were not found." + ).format(joined_paths)) def fill_latest_versions(self, context, project_name): """Try to find latest version for each instance's product name.
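For readers following the new task logic above: `instances_missing_task` is a two-level mapping (folder id, then task name) so a single `ayon_api.get_tasks` call can resolve every missing entity. A minimal standalone sketch of the same grouping and lookup pattern, with made-up ids and names that are not part of the patch:

    # Hypothetical task entities shaped like 'ayon_api.get_tasks' output.
    task_entities = [
        {"folderId": "f1", "name": "comp"},
        {"folderId": "f1", "name": "anim"},
    ]

    # Group by folder id first, then by task name.
    task_entity_by_ids = {}
    for task_entity in task_entities:
        by_name = task_entity_by_ids.setdefault(task_entity["folderId"], {})
        by_name[task_entity["name"]] = task_entity

    # Lookups mirror the plugin; missing combinations resolve to None.
    print(task_entity_by_ids.get("f1", {}).get("comp"))  # the 'comp' entity
    print(task_entity_by_ids.get("f2", {}).get("comp"))  # None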
@@ -140,13 +241,13 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): latest_version = instance.data.get("latestVersion") instance.data["latestVersion"] = latest_version - # Skip instances without "assetEntity" - asset_doc = instance.data.get("assetEntity") - if not asset_doc: + # Skip instances without "folderEntity" + folder_entity = instance.data.get("folderEntity") + if not folder_entity: continue # Store folder ids and product names for queries - folder_id = asset_doc["_id"] + folder_id = folder_entity["id"] product_name = instance.data["productName"] # Prepare instance hierarchy for faster filling latest versions @@ -157,37 +258,41 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): hierarchy[folder_id][product_name].append(instance) names_by_folder_ids[folder_id].add(product_name) - subset_docs = [] + product_entities = [] if names_by_folder_ids: - subset_docs = list(get_subsets( - project_name, names_by_asset_ids=names_by_folder_ids + product_entities = list(ayon_api.get_products( + project_name, names_by_folder_ids=names_by_folder_ids )) product_ids = { - subset_doc["_id"] - for subset_doc in subset_docs + product_entity["id"] + for product_entity in product_entities } - last_version_docs_by_product_id = get_last_versions( - project_name, product_ids, fields=["name"] + last_versions_by_product_id = ayon_api.get_last_versions( + project_name, product_ids, fields={"version"} ) - for subset_doc in subset_docs: - product_id = subset_doc["_id"] - last_version_doc = last_version_docs_by_product_id.get(product_id) - if last_version_doc is None: + for product_entity in product_entities: + product_id = product_entity["id"] + last_version_entity = last_versions_by_product_id.get(product_id) + if last_version_entity is None: continue - folder_id = subset_doc["parent"] - product_name = subset_doc["name"] + last_version = last_version_entity["version"] + folder_id = product_entity["folderId"] + product_name = product_entity["name"] _instances = hierarchy[folder_id][product_name] for _instance in _instances: - _instance.data["latestVersion"] = last_version_doc["name"] + _instance.data["latestVersion"] = last_version def fill_anatomy_data(self, context): self.log.debug("Storing anatomy data to instance data.") - project_doc = context.data["projectEntity"] - project_task_types = project_doc["config"]["tasks"] + project_entity = context.data["projectEntity"] + task_types_by_name = { + task_type["name"]: task_type + for task_type in project_entity["taskTypes"] + } for instance in context: anatomy_data = copy.deepcopy(context.data["anatomyData"]) @@ -202,8 +307,8 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): } }) - self._fill_asset_data(instance, project_doc, anatomy_data) - self._fill_task_data(instance, project_task_types, anatomy_data) + self._fill_folder_data(instance, project_entity, anatomy_data) + self._fill_task_data(instance, task_types_by_name, anatomy_data) # Define version version_number = None @@ -258,7 +363,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): anatomy_data["fps"] = float("{:0.2f}".format(float(fps))) # Store anatomy data - instance.data["projectEntity"] = project_doc + instance.data["projectEntity"] = project_entity instance.data["anatomyData"] = anatomy_data instance.data["version"] = version_number @@ -272,24 +377,28 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): json.dumps(anatomy_data, indent=4) )) - def _fill_asset_data(self, instance, project_doc, anatomy_data): - # QUESTION should we make 
sure that all asset data are poped if asset - # data cannot be found? - # - 'asset', 'hierarchy', 'parent', 'folder' - asset_doc = instance.data.get("assetEntity") - if asset_doc: - parents = asset_doc["data"].get("parents") or list() - parent_name = project_doc["name"] - if parents: - parent_name = parents[-1] + def _fill_folder_data(self, instance, project_entity, anatomy_data): + # QUESTION should we make sure that all folder data are popped if + # folder data cannot be found? + # - 'asset', 'hierarchy', 'parent', 'folder' + folder_entity = instance.data.get("folderEntity") + if folder_entity: + folder_name = folder_entity["name"] + folder_path = folder_entity["path"] + hierarchy_parts = folder_path.split("/") + hierarchy_parts.pop(0) + hierarchy_parts.pop(-1) + parent_name = project_entity["name"] + if hierarchy_parts: + parent_name = hierarchy_parts[-1] - hierarchy = "/".join(parents) + hierarchy = "/".join(hierarchy_parts) anatomy_data.update({ - "asset": asset_doc["name"], + "asset": folder_name, "hierarchy": hierarchy, "parent": parent_name, "folder": { - "name": asset_doc["name"], + "name": folder_name, }, }) return @@ -298,21 +407,21 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): hierarchy = instance.data["hierarchy"] anatomy_data["hierarchy"] = hierarchy - parent_name = project_doc["name"] + parent_name = project_entity["name"] if hierarchy: parent_name = hierarchy.split("/")[-1] - asset_name = instance.data["folderPath"].split("/")[-1] + folder_name = instance.data["folderPath"].split("/")[-1] anatomy_data.update({ - "asset": asset_name, + "asset": folder_name, "hierarchy": hierarchy, "parent": parent_name, "folder": { - "name": asset_name, + "name": folder_name, }, }) - def _fill_task_data(self, instance, project_task_types, anatomy_data): + def _fill_task_data(self, instance, task_types_by_name, anatomy_data): # QUESTION should we make sure that all task data are poped if task # data cannot be resolved?
# - 'task' @@ -322,10 +431,10 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): if not task_name: return - # Find task data based on asset entity - asset_doc = instance.data.get("assetEntity") - task_data = self._get_task_data_from_asset( - asset_doc, task_name, project_task_types + # Find task data based on folder entity + task_entity = instance.data.get("taskEntity") + task_data = self._get_task_data_from_entity( + task_entity, task_types_by_name ) if task_data: # Fill task data @@ -341,30 +450,34 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): if not instance.data.get("newAssetPublishing"): return - # Try to find task data based on hierarchy context and asset name + # Try to find task data based on hierarchy context and folder path hierarchy_context = instance.context.data.get("hierarchyContext") - asset_name = instance.data.get("folderPath") - if not hierarchy_context or not asset_name: + folder_path = instance.data.get("folderPath") + if not hierarchy_context or not folder_path: return project_name = instance.context.data["projectName"] - if "/" not in asset_name: + if "/" not in folder_path: tasks_info = self._find_tasks_info_in_hierarchy( - hierarchy_context, asset_name + hierarchy_context, folder_path ) else: current_data = hierarchy_context.get(project_name, {}) - for key in asset_name.split("/"): + for key in folder_path.split("/"): if key: - current_data = current_data.get("childs", {}).get(key, {}) + current_data = ( + current_data + .get("children", {}) + .get(key, {}) + ) tasks_info = current_data.get("tasks", {}) task_info = tasks_info.get(task_name, {}) task_type = task_info.get("type") task_code = ( - project_task_types + task_types_by_name .get(task_type, {}) - .get("short_name") + .get("shortName") ) anatomy_data["task"] = { "name": task_name, @@ -372,43 +485,41 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): "short": task_code } - def _get_task_data_from_asset( - self, asset_doc, task_name, project_task_types + def _get_task_data_from_entity( + self, task_entity, task_types_by_name ): """ Args: - asset_doc (Union[dict[str, Any], None]): Asset document. - task_name (Union[str, None]): Task name. - project_task_types (dict[str, dict[str, Any]]): Project task + task_entity (Union[dict[str, Any], None]): Task entity. + task_types_by_name (dict[str, dict[str, Any]]): Project task types. Returns: Union[dict[str, str], None]: Task data or None if not found. """ - if not asset_doc or not task_name: + if not task_entity: return None - asset_tasks = asset_doc["data"]["tasks"] - task_type = asset_tasks.get(task_name, {}).get("type") + task_type = task_entity["taskType"] task_code = ( - project_task_types + task_types_by_name .get(task_type, {}) - .get("short_name") + .get("shortName") ) return { - "name": task_name, + "name": task_entity["name"], "type": task_type, "short": task_code } - def _find_tasks_info_in_hierarchy(self, hierarchy_context, asset_name): + def _find_tasks_info_in_hierarchy(self, hierarchy_context, folder_name): """Find tasks info for an asset in editorial hierarchy. Args: hierarchy_context (dict[str, Any]): Editorial hierarchy context. - asset_name (str): Asset name. + folder_name (str): Folder name. Returns: dict[str, dict[str, Any]]: Tasks info by name. 
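The hierarchy-context walk that `_fill_task_data` performs above, and the breadth-first search shown in the next hunk, both expect nested dicts keyed by entity names, where the patch's renamed `children` key holds sub-folders and `tasks` holds task info. A minimal sketch of that shape with made-up project and shot names, not taken from the patch:

    # Hypothetical editorial hierarchy context after this patch's rename
    # of "childs" to "children".
    hierarchy_context = {
        "MyProject": {
            "children": {
                "sq01": {
                    "children": {
                        "sh010": {"tasks": {"comp": {"type": "Compositing"}}}
                    }
                }
            }
        }
    }

    # Walk a folder path the same way '_fill_task_data' does.
    current_data = hierarchy_context.get("MyProject", {})
    for key in "sq01/sh010".split("/"):
        if key:
            current_data = current_data.get("children", {}).get(key, {})
    print(current_data.get("tasks", {}))  # {'comp': {'type': 'Compositing'}}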
@@ -418,9 +529,9 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): hierarchy_queue.append(copy.deepcopy(hierarchy_context)) while hierarchy_queue: item = hierarchy_queue.popleft() - if asset_name in item: - return item[asset_name].get("tasks") or {} + if folder_name in item: + return item[folder_name].get("tasks") or {} for subitem in item.values(): - hierarchy_queue.extend(subitem.get("childs") or []) + hierarchy_queue.extend(subitem.get("children") or []) return {} diff --git a/client/ayon_core/plugins/publish/collect_audio.py b/client/ayon_core/plugins/publish/collect_audio.py index 357dad76d4..c1633e414e 100644 --- a/client/ayon_core/plugins/publish/collect_audio.py +++ b/client/ayon_core/plugins/publish/collect_audio.py @@ -1,18 +1,13 @@ import collections + +import ayon_api import pyblish.api -from ayon_core.client import ( - get_assets, - get_subsets, - get_last_versions, - get_representations, - get_asset_name_identifier, -) from ayon_core.pipeline.load import get_representation_path_with_anatomy class CollectAudio(pyblish.api.ContextPlugin): - """Collect asset's last published audio. + """Collect folder's last published audio. The audio product name searched for is defined in: project settings > Collect Audio @@ -23,7 +18,7 @@ class CollectAudio(pyblish.api.ContextPlugin): converted to context plugin which requires only 4 queries top. """ - label = "Collect Asset Audio" + label = "Collect Folder Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] hosts = [ @@ -40,7 +35,10 @@ class CollectAudio(pyblish.api.ContextPlugin): "webpublisher", "aftereffects", "flame", - "unreal" + "unreal", + "blender", + "houdini", + "max", ] audio_product_name = "audioMain" @@ -64,36 +62,36 @@ class CollectAudio(pyblish.api.ContextPlugin): return # Add audio to instance if exists.
- instances_by_asset_name = collections.defaultdict(list) + instances_by_folder_path = collections.defaultdict(list) for instance in filtered_instances: - asset_name = instance.data["folderPath"] - instances_by_asset_name[asset_name].append(instance) + folder_path = instance.data["folderPath"] + instances_by_folder_path[folder_path].append(instance) - asset_names = set(instances_by_asset_name.keys()) + folder_paths = set(instances_by_folder_path.keys()) self.log.debug(( - "Searching for audio product '{product}' in assets {assets}" + "Searching for audio product '{product}' in folders {folders}" ).format( product=self.audio_product_name, - assets=", ".join([ - '"{}"'.format(asset_name) - for asset_name in asset_names + folders=", ".join([ + '"{}"'.format(folder_path) + for folder_path in folder_paths ]) )) # Query all required documents project_name = context.data["projectName"] anatomy = context.data["anatomy"] - repre_docs_by_asset_names = self.query_representations( - project_name, asset_names) + repre_entities_by_folder_paths = self.query_representations( + project_name, folder_paths) - for asset_name, instances in instances_by_asset_name.items(): - repre_docs = repre_docs_by_asset_names[asset_name] - if not repre_docs: + for folder_path, instances in instances_by_folder_path.items(): + repre_entities = repre_entities_by_folder_paths[folder_path] + if not repre_entities: continue - repre_doc = repre_docs[0] + repre_entity = repre_entities[0] repre_path = get_representation_path_with_anatomy( - repre_doc, anatomy + repre_entity, anatomy ) for instance in instances: instance.data["audio"] = [{ @@ -103,7 +101,7 @@ class CollectAudio(pyblish.api.ContextPlugin): self.log.debug("Audio Data added to instance ...") def query_representations(self, project_name, folder_paths): - """Query representations related to audio products for passed assets. + """Query representations related to audio products for passed folders. Args: project_name (str): Project in which we're looking for all @@ -113,67 +111,74 @@ class CollectAudio(pyblish.api.ContextPlugin): Returns: collections.defaultdict[str, List[Dict[Str, Any]]]: Representations - related to audio products by asset name. - """ + related to audio products by folder path. 
+ """ output = collections.defaultdict(list) - # Query asset documents - asset_docs = get_assets( + # Skip the queries if audio product name is not defined + if not self.audio_product_name: + return output + + # Query folder entities + folder_entities = ayon_api.get_folders( project_name, - asset_names=folder_paths, - fields=["_id", "name", "data.parents"] + folder_paths=folder_paths, + fields={"id", "path"} ) folder_id_by_path = { - get_asset_name_identifier(asset_doc): asset_doc["_id"] - for asset_doc in asset_docs + folder_entity["path"]: folder_entity["id"] + for folder_entity in folder_entities } folder_ids = set(folder_id_by_path.values()) # Query products with name define by 'audio_product_name' attr - # - one or none products with the name should be available on an asset - subset_docs = get_subsets( + # - one or none products with the name should be available on + # an folder + product_entities = ayon_api.get_products( project_name, - subset_names=[self.audio_product_name], - asset_ids=folder_ids, - fields=["_id", "parent"] + product_names=[self.audio_product_name], + folder_ids=folder_ids, + fields={"id", "folderId"} ) product_id_by_folder_id = {} - for subset_doc in subset_docs: - folder_id = subset_doc["parent"] - product_id_by_folder_id[folder_id] = subset_doc["_id"] + for product_entity in product_entities: + folder_id = product_entity["folderId"] + product_id_by_folder_id[folder_id] = product_entity["id"] product_ids = set(product_id_by_folder_id.values()) if not product_ids: return output # Find all latest versions for the products - version_docs_by_product_id = get_last_versions( - project_name, subset_ids=product_ids, fields=["_id", "parent"] + last_versions_by_product_id = ayon_api.get_last_versions( + project_name, product_ids=product_ids, fields={"id", "productId"} ) version_id_by_product_id = { - product_id: version_doc["_id"] - for product_id, version_doc in version_docs_by_product_id.items() + product_id: version_entity["id"] + for product_id, version_entity in ( + last_versions_by_product_id.items() + ) } version_ids = set(version_id_by_product_id.values()) if not version_ids: return output # Find representations under latest versions of audio products - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=version_ids ) - repre_docs_by_version_id = collections.defaultdict(list) - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - repre_docs_by_version_id[version_id].append(repre_doc) + repre_entities_by_version_id = collections.defaultdict(list) + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + repre_entities_by_version_id[version_id].append(repre_entity) - if not repre_docs_by_version_id: + if not repre_entities_by_version_id: return output for folder_path in folder_paths: folder_id = folder_id_by_path.get(folder_path) product_id = product_id_by_folder_id.get(folder_id) version_id = version_id_by_product_id.get(product_id) - output[folder_path] = repre_docs_by_version_id[version_id] + output[folder_path] = repre_entities_by_version_id[version_id] return output diff --git a/client/ayon_core/plugins/publish/collect_context_entities.py b/client/ayon_core/plugins/publish/collect_context_entities.py index 64ef73e2d9..f340178e4f 100644 --- a/client/ayon_core/plugins/publish/collect_context_entities.py +++ b/client/ayon_core/plugins/publish/collect_context_entities.py @@ -2,18 +2,20 @@ Requires: context -> projectName - context -> asset + context -> folderPath context -> 
task Provides: - context -> projectEntity - Project document from database. - context -> assetEntity - Asset document from database only if 'asset' is - set in context. + context -> projectEntity - Project entity from AYON server. + context -> folderEntity - Folder entity from AYON server only if + 'folderPath' is set in context data. + context -> taskEntity - Task entity from AYON server only if 'folderPath' + and 'task' are set in context data. """ import pyblish.api +import ayon_api -from ayon_core.client import get_project, get_asset_by_name from ayon_core.pipeline import KnownPublishError @@ -25,45 +27,48 @@ class CollectContextEntities(pyblish.api.ContextPlugin): def process(self, context): project_name = context.data["projectName"] - asset_name = context.data["folderPath"] + folder_path = context.data["folderPath"] task_name = context.data["task"] - project_entity = get_project(project_name) + project_entity = ayon_api.get_project(project_name) if not project_entity: raise KnownPublishError( - "Project '{0}' was not found.".format(project_name) + "Project '{}' was not found.".format(project_name) ) self.log.debug("Collected Project \"{}\"".format(project_entity)) context.data["projectEntity"] = project_entity - if not asset_name: + if not folder_path: self.log.info("Context is not set. Can't collect global data.") return - asset_entity = get_asset_by_name(project_name, asset_name) - assert asset_entity, ( - "No asset found by the name '{0}' in project '{1}'" - ).format(asset_name, project_name) + folder_entity = self._get_folder_entity(project_name, folder_path) + self.log.debug("Collected Folder \"{}\"".format(folder_entity)) - self.log.debug("Collected Asset \"{}\"".format(asset_entity)) + task_entity = self._get_task_entity( + project_name, folder_entity, task_name + ) + self.log.debug("Collected Task \"{}\"".format(task_entity)) - context.data["assetEntity"] = asset_entity + context.data["folderEntity"] = folder_entity + context.data["taskEntity"] = task_entity - data = asset_entity['data'] + folder_attributes = folder_entity["attrib"] # Task type - asset_tasks = data.get("tasks") or {} - task_info = asset_tasks.get(task_name) or {} - task_type = task_info.get("type") + task_type = None + if task_entity: + task_type = task_entity["taskType"] + context.data["taskType"] = task_type - frame_start = data.get("frameStart") + frame_start = folder_attributes.get("frameStart") if frame_start is None: frame_start = 1 self.log.warning("Missing frame start. Defaulting to 1.") - frame_end = data.get("frameEnd") + frame_end = folder_attributes.get("frameEnd") if frame_end is None: frame_end = 2 self.log.warning("Missing frame end. 
Defaulting to 2.") @@ -71,8 +76,8 @@ class CollectContextEntities(pyblish.api.ContextPlugin): context.data["frameStart"] = frame_start context.data["frameEnd"] = frame_end - handle_start = data.get("handleStart") or 0 - handle_end = data.get("handleEnd") or 0 + handle_start = folder_attributes.get("handleStart") or 0 + handle_end = folder_attributes.get("handleEnd") or 0 context.data["handleStart"] = int(handle_start) context.data["handleEnd"] = int(handle_end) @@ -82,4 +87,30 @@ class CollectContextEntities(pyblish.api.ContextPlugin): context.data["frameStartHandle"] = frame_start_h context.data["frameEndHandle"] = frame_end_h - context.data["fps"] = data["fps"] + context.data["fps"] = folder_attributes["fps"] + + def _get_folder_entity(self, project_name, folder_path): + if not folder_path: + return None + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if not folder_entity: + raise KnownPublishError( + "Folder '{}' was not found in project '{}'.".format( + folder_path, project_name + ) + ) + return folder_entity + + def _get_task_entity(self, project_name, folder_entity, task_name): + if not folder_entity or not task_name: + return None + task_entity = ayon_api.get_task_by_name( + project_name, folder_entity["id"], task_name + ) + if not task_entity: + task_path = "/".join([folder_entity["path"], task_name]) + raise KnownPublishError( + "Task '{}' was not found in project '{}'.".format( + task_path, project_name) + ) + return task_entity \ No newline at end of file diff --git a/client/ayon_core/plugins/publish/collect_current_context.py b/client/ayon_core/plugins/publish/collect_current_context.py index 76d30a913e..76181ffc39 100644 --- a/client/ayon_core/plugins/publish/collect_current_context.py +++ b/client/ayon_core/plugins/publish/collect_current_context.py @@ -21,14 +21,14 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): def process(self, context): # Check if values are already set project_name = context.data.get("projectName") - asset_name = context.data.get("folderPath") + folder_path = context.data.get("folderPath") task_name = context.data.get("task") current_context = get_current_context() if not project_name: context.data["projectName"] = current_context["project_name"] - if not asset_name: + if not folder_path: context.data["folderPath"] = current_context["folder_path"] if not task_name: @@ -40,10 +40,10 @@ class CollectCurrentContext(pyblish.api.ContextPlugin): self.log.info(( "Collected project context\n" "Project: {project_name}\n" - "Asset: {asset_name}\n" + "Folder: {folder_path}\n" "Task: {task_name}" ).format( project_name=context.data["projectName"], - asset_name=context.data["folderPath"], + folder_path=context.data["folderPath"], task_name=context.data["task"] )) diff --git a/client/ayon_core/plugins/publish/collect_custom_staging_dir.py b/client/ayon_core/plugins/publish/collect_custom_staging_dir.py index e42f34b0ae..49c3a98dd2 100644 --- a/client/ayon_core/plugins/publish/collect_custom_staging_dir.py +++ b/client/ayon_core/plugins/publish/collect_custom_staging_dir.py @@ -28,7 +28,7 @@ class CollectCustomStagingDir(pyblish.api.InstancePlugin): Location of the folder is configured in `project_anatomy/templates/others`. 
('transient' key is expected, with 'folder' key) - Which family/task type/subset is applicable is configured in: + Which family/task type/product is applicable is configured in: `project_settings/global/tools/publish/custom_staging_dir_profiles` """ diff --git a/client/ayon_core/plugins/publish/collect_frames_fix.py b/client/ayon_core/plugins/publish/collect_frames_fix.py index 0fe86b8d70..0f7d5b692a 100644 --- a/client/ayon_core/plugins/publish/collect_frames_fix.py +++ b/client/ayon_core/plugins/publish/collect_frames_fix.py @@ -1,14 +1,11 @@ import pyblish.api +import ayon_api + from ayon_core.lib.attribute_definitions import ( TextDef, BoolDef ) - from ayon_core.pipeline.publish import AYONPyblishPluginMixin -from ayon_core.client.entities import ( - get_last_version_by_subset_name, - get_representations -) class CollectFramesFixDef( @@ -41,28 +38,34 @@ class CollectFramesFixDef( instance.data["frames_to_fix"] = frames_to_fix product_name = instance.data["productName"] - asset_name = instance.data["folderPath"] + folder_entity = instance.data["folderEntity"] project_entity = instance.data["projectEntity"] project_name = project_entity["name"] - version = get_last_version_by_subset_name( + version_entity = ayon_api.get_last_version_by_product_name( project_name, product_name, - asset_name=asset_name + folder_entity["id"] ) - if not version: + if not version_entity: self.log.warning( "No last version found, re-render not possible" ) return - representations = get_representations( - project_name, version_ids=[version["_id"]] + representations = ayon_api.get_representations( + project_name, version_ids={version_entity["id"]} ) published_files = [] for repre in representations: - if repre["context"]["family"] not in self.families: + # TODO get product type from product entity instead of + # representation 'context' data. 
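+ # The representation 'context' of OpenPype-era publishes only carries + # 'family'; newer AYON publishes fill 'product' with a 'type' key, + # hence the fallback below.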
+ repre_context = repre["context"] + product_type = repre_context.get("product", {}).get("type") + if not product_type: + product_type = repre_context.get("family") + if product_type not in self.families: continue for file_info in repre.get("files"): @@ -73,7 +76,7 @@ class CollectFramesFixDef( instance.data["last_version_published_files"])) if self.rewrite_version_enable and rewrite_version: - instance.data["version"] = version["name"] + instance.data["version"] = version_entity["version"] # limits triggering version validator instance.data.pop("latestVersion") diff --git a/client/ayon_core/plugins/publish/collect_from_create_context.py b/client/ayon_core/plugins/publish/collect_from_create_context.py index 8218806c4c..b99866fed9 100644 --- a/client/ayon_core/plugins/publish/collect_from_create_context.py +++ b/client/ayon_core/plugins/publish/collect_from_create_context.py @@ -53,11 +53,11 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): context.data.update(create_context.context_data_to_store()) context.data["newPublishing"] = True # Update context data - asset_name = create_context.get_current_asset_name() + folder_path = create_context.get_current_folder_path() task_name = create_context.get_current_task_name() for key, value in ( ("AYON_PROJECT_NAME", project_name), - ("AYON_FOLDER_PATH", asset_name), + ("AYON_FOLDER_PATH", folder_path), ("AYON_TASK_NAME", task_name) ): if value is None: diff --git a/client/ayon_core/plugins/publish/collect_hierarchy.py b/client/ayon_core/plugins/publish/collect_hierarchy.py index 8ba83d582f..2ae3cc67f3 100644 --- a/client/ayon_core/plugins/publish/collect_hierarchy.py +++ b/client/ayon_core/plugins/publish/collect_hierarchy.py @@ -17,17 +17,18 @@ class CollectHierarchy(pyblish.api.ContextPlugin): hosts = ["resolve", "hiero", "flame"] def process(self, context): - temp_context = {} project_name = context.data["projectName"] - final_context = {} - final_context[project_name] = {} - final_context[project_name]['entity_type'] = 'Project' - + final_context = { + project_name: { + "entity_type": "project", + "children": {} + }, + } + temp_context = {} for instance in context: self.log.debug("Processing instance: `{}` ...".format(instance)) # shot data dict - shot_data = {} product_type = instance.data["productType"] families = instance.data["families"] @@ -41,39 +42,38 @@ class CollectHierarchy(pyblish.api.ContextPlugin): if not instance.data.get("heroTrack"): continue - # get asset build data if any available - shot_data["inputs"] = [ - x["_id"] for x in instance.data.get("assetbuilds", []) - ] - - # suppose that all instances are Shots - shot_data['entity_type'] = 'Shot' - shot_data['tasks'] = instance.data.get("tasks") or {} - shot_data["comments"] = instance.data.get("comments", []) - - shot_data['custom_attributes'] = { - "handleStart": instance.data["handleStart"], - "handleEnd": instance.data["handleEnd"], - "frameStart": instance.data["frameStart"], - "frameEnd": instance.data["frameEnd"], - "clipIn": instance.data["clipIn"], - "clipOut": instance.data["clipOut"], - "fps": instance.data["fps"], - "resolutionWidth": instance.data["resolutionWidth"], - "resolutionHeight": instance.data["resolutionHeight"], - "pixelAspect": instance.data["pixelAspect"] + shot_data = { + "entity_type": "folder", + # WARNING Default folder type is hardcoded + # suppose that all instances are Shots + "folder_type": "Shot", + "tasks": instance.data.get("tasks") or {}, + "comments": instance.data.get("comments", []), + "attributes": { + "handleStart": 
instance.data["handleStart"], + "handleEnd": instance.data["handleEnd"], + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + "clipIn": instance.data["clipIn"], + "clipOut": instance.data["clipOut"], + "fps": instance.data["fps"], + "resolutionWidth": instance.data["resolutionWidth"], + "resolutionHeight": instance.data["resolutionHeight"], + "pixelAspect": instance.data["pixelAspect"], + }, } # Split by '/' for AYON where asset is a path name = instance.data["folderPath"].split("/")[-1] actual = {name: shot_data} for parent in reversed(instance.data["parents"]): - next_dict = {} - parent_name = parent["entity_name"] - next_dict[parent_name] = {} - next_dict[parent_name]["entity_type"] = parent[ - "entity_type"].capitalize() - next_dict[parent_name]["childs"] = actual + next_dict = { + parent["entity_name"]: { + "entity_type": "folder", + "folder_type": parent["folder_type"], + "children": actual, + } + } actual = next_dict temp_context = self._update_dict(temp_context, actual) @@ -82,7 +82,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): if not temp_context: return - final_context[project_name]['childs'] = temp_context + final_context[project_name]["children"] = temp_context # adding hierarchy context to context context.data["hierarchyContext"] = final_context @@ -90,8 +90,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): context.data["hierarchyContext"])) def _update_dict(self, parent_dict, child_dict): - """ - Nesting each children into its parent. + """Nesting each child into its parent. Args: parent_dict (dict): parent dict wich should be nested with children diff --git a/client/ayon_core/plugins/publish/collect_host_name.py b/client/ayon_core/plugins/publish/collect_host_name.py index e76579bbd2..ea4ec7ad41 100644 --- a/client/ayon_core/plugins/publish/collect_host_name.py +++ b/client/ayon_core/plugins/publish/collect_host_name.py @@ -1,14 +1,13 @@ """ Requires: None + Provides: - context -> host (str) + context -> hostName (str) """ import os import pyblish.api -from ayon_core.lib import ApplicationManager - class CollectHostName(pyblish.api.ContextPlugin): """Collect avalon host name to context.""" @@ -18,30 +17,8 @@ class CollectHostName(pyblish.api.ContextPlugin): def process(self, context): host_name = context.data.get("hostName") - app_name = context.data.get("appName") - app_label = context.data.get("appLabel") - # Don't override value if is already set - if host_name and app_name and app_label: + if host_name: return # Use AYON_HOST_NAME to get host name if available - if not host_name: - host_name = os.environ.get("AYON_HOST_NAME") - - # Use AYON_APP_NAME to get full app name - if not app_name: - app_name = os.environ.get("AYON_APP_NAME") - - # Fill missing values based on app full name - if (not host_name or not app_label) and app_name: - app_manager = ApplicationManager() - app = app_manager.applications.get(app_name) - if app: - if not host_name: - host_name = app.host_name - if not app_label: - app_label = app.full_label - - context.data["hostName"] = host_name - context.data["appName"] = app_name - context.data["appLabel"] = app_label + context.data["hostName"] = os.environ.get("AYON_HOST_NAME") diff --git a/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py b/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py index 6caee1be6a..770f3470c6 100644 --- a/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py +++ 
b/client/ayon_core/plugins/publish/collect_input_representations_to_versions.py @@ -1,7 +1,6 @@ +import ayon_api import pyblish.api -from ayon_core.client import get_representations - class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): """Converts collected input representations to input versions. @@ -24,14 +23,14 @@ class CollectInputRepresentationsToVersions(pyblish.api.ContextPlugin): if inst_repre: representations.update(inst_repre) - representations_docs = get_representations( - project_name=context.data["projectEntity"]["name"], + repre_entities = ayon_api.get_representations( + project_name=context.data["projectName"], representation_ids=representations, - fields=["_id", "parent"]) + fields={"id", "versionId"}) representation_id_to_version_id = { - str(repre["_id"]): repre["parent"] - for repre in representations_docs + repre["id"]: repre["versionId"] + for repre in repre_entities } for instance in context: diff --git a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py index 3f47e6e3bf..37a5e87a7a 100644 --- a/client/ayon_core/plugins/publish/collect_otio_subset_resources.py +++ b/client/ayon_core/plugins/publish/collect_otio_subset_resources.py @@ -43,8 +43,10 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) anatomy = instance.context.data["anatomy"] - publish_template_category = anatomy.templates[template_name] - template = os.path.normpath(publish_template_category["path"]) + publish_path_template = anatomy.get_template_item( + "publish", template_name, "path" + ).template + template = os.path.normpath(publish_path_template) self.log.debug( ">> template: {}".format(template)) diff --git a/client/ayon_core/plugins/publish/collect_rendered_files.py b/client/ayon_core/plugins/publish/collect_rendered_files.py index ca88a7aa82..8a60e7619d 100644 --- a/client/ayon_core/plugins/publish/collect_rendered_files.py +++ b/client/ayon_core/plugins/publish/collect_rendered_files.py @@ -36,18 +36,18 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): def _load_json(self, path): path = path.strip('\"') - assert os.path.isfile(path), ( - "Path to json file doesn't exist. \"{}\"".format(path) - ) + + if not os.path.isfile(path): + raise FileNotFoundError( + f"Path to json file doesn't exist. 
\"{path}\"") + data = None with open(path, "r") as json_file: try: data = json.load(json_file) except Exception as exc: self.log.error( - "Error loading json: " - "{} - Exception: {}".format(path, exc) - ) + "Error loading json: %s - Exception: %s", path, exc) return data def _fill_staging_dir(self, data_object, anatomy): @@ -73,30 +73,23 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin): data_err = "invalid json file - missing data" required = ["user", "comment", "job", "instances", "version"] - assert all(elem in data.keys() for elem in required), data_err + + if any(elem not in data for elem in required): + raise ValueError(data_err) + if "folderPath" not in data and "asset" not in data: - raise AssertionError(data_err) + raise ValueError(data_err) if "folderPath" not in data: data["folderPath"] = data.pop("asset") - # set context by first json file - ctx = self._context.data - - ctx["folderPath"] = ctx.get("folderPath") or data.get("folderPath") - ctx["intent"] = ctx.get("intent") or data.get("intent") - ctx["comment"] = ctx.get("comment") or data.get("comment") - ctx["user"] = ctx.get("user") or data.get("user") - ctx["version"] = ctx.get("version") or data.get("version") - - # basic sanity check to see if we are working in same context - # if some other json file has different context, bail out. - ctx_err = "inconsistent contexts in json files - %s" - assert ctx.get("folderPath") == data.get("folderPath"), ctx_err % "folderPath" - assert ctx.get("intent") == data.get("intent"), ctx_err % "intent" - assert ctx.get("comment") == data.get("comment"), ctx_err % "comment" - assert ctx.get("user") == data.get("user"), ctx_err % "user" - assert ctx.get("version") == data.get("version"), ctx_err % "version" + # ftrack credentials are passed as environment variables by Deadline + # to publish job, but Muster doesn't pass them. + if data.get("ftrack") and not os.environ.get("FTRACK_API_USER"): + ftrack = data.get("ftrack") + os.environ["FTRACK_API_USER"] = ftrack["FTRACK_API_USER"] + os.environ["FTRACK_API_KEY"] = ftrack["FTRACK_API_KEY"] + os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"] # now we can just add instances from json file and we are done any_staging_dir_persistent = False diff --git a/client/ayon_core/plugins/publish/collect_resources_path.py b/client/ayon_core/plugins/publish/collect_resources_path.py index 6a871124f1..959523918e 100644 --- a/client/ayon_core/plugins/publish/collect_resources_path.py +++ b/client/ayon_core/plugins/publish/collect_resources_path.py @@ -79,23 +79,12 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "representation": "TEMP" }) - publish_templates = anatomy.templates_obj["publish"] - if "folder" in publish_templates: - publish_folder = publish_templates["folder"].format_strict( - template_data - ) - else: - # solve deprecated situation when `folder` key is not underneath - # `publish` anatomy - self.log.warning(( - "Deprecation warning: Anatomy does not have set `folder`" - " key underneath `publish` (in global of for project `{}`)." 
- ).format(anatomy.project_name)) - - file_path = publish_templates["path"].format_strict(template_data) - publish_folder = os.path.dirname(file_path) - - publish_folder = os.path.normpath(publish_folder) + publish_templates = anatomy.get_template_item( + "publish", "default", "directory" + ) + publish_folder = os.path.normpath( + publish_templates.format_strict(template_data) + ) resources_folder = os.path.join(publish_folder, "resources") instance.data["publishDir"] = publish_folder diff --git a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py index c1326f164d..1267c009e7 100644 --- a/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py +++ b/client/ayon_core/plugins/publish/collect_scene_loaded_versions.py @@ -1,6 +1,6 @@ +import ayon_api import pyblish.api -from ayon_core.client import get_representations from ayon_core.pipeline import registered_host @@ -42,22 +42,22 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): } project_name = context.data["projectName"] - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_ids=repre_ids, - fields=["_id", "parent"] + fields={"id", "versionId"} ) - repre_doc_by_str_id = { - str(doc["_id"]): doc - for doc in repre_docs + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in repre_entities } # QUESTION should we add same representation id when loaded multiple # times? for con in containers: repre_id = con["representation"] - repre_doc = repre_doc_by_str_id.get(repre_id) - if repre_doc is None: + repre_entity = repre_entities_by_id.get(repre_id) + if repre_entity is None: self.log.warning(( "Skipping container," " did not find representation document. {}" @@ -68,8 +68,8 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin): # may have more then one representation that are same version version = { "container_name": con["name"], - "representation_id": repre_doc["_id"], - "version_id": repre_doc["parent"], + "representation_id": repre_entity["id"], + "version_id": repre_entity["versionId"], } loaded_versions.append(version) diff --git a/client/ayon_core/plugins/publish/collect_settings.py b/client/ayon_core/plugins/publish/collect_settings.py index 66b89a114c..db58e7eaa9 100644 --- a/client/ayon_core/plugins/publish/collect_settings.py +++ b/client/ayon_core/plugins/publish/collect_settings.py @@ -1,5 +1,5 @@ from pyblish import api -from ayon_core.settings import get_current_project_settings +from ayon_core.settings import get_project_settings class CollectSettings(api.ContextPlugin): @@ -9,4 +9,9 @@ class CollectSettings(api.ContextPlugin): label = "Collect Settings" def process(self, context): - context.data["project_settings"] = get_current_project_settings() + project_name = context.data["projectName"] + self.log.debug( + "Collecting settings for project: {}".format(project_name) + ) + project_settings = get_project_settings(project_name) + context.data["project_settings"] = project_settings diff --git a/client/ayon_core/plugins/publish/extract_burnin.py b/client/ayon_core/plugins/publish/extract_burnin.py index ab6353a29f..93774842ca 100644 --- a/client/ayon_core/plugins/publish/extract_burnin.py +++ b/client/ayon_core/plugins/publish/extract_burnin.py @@ -27,7 +27,7 @@ class ExtractBurnin(publish.Extractor): Extractor to create video with pre-defined burnins from existing extracted video representation. 
- It will work only on represenations having `burnin = True` or + It will work only on representations having `burnin = True` or `tags` including `burnin` """ @@ -125,7 +125,7 @@ class ExtractBurnin(publish.Extractor): burnin_defs = copy.deepcopy(src_burnin_defs) - # Filter output definition by `burnin` represetation key + # Filter output definition by `burnin` representation key repre_linked_burnins = [ burnin_def for burnin_def in burnin_defs @@ -194,6 +194,16 @@ class ExtractBurnin(publish.Extractor): ).format(host_name, product_type, task_name, profile)) return + burnins_per_repres = self._get_burnins_per_representations( + instance, burnin_defs + ) + if not burnins_per_repres: + self.log.debug( + "Skipped instance. No representations found matching a burnin " + "definition in: %s", burnin_defs + ) + return + burnin_options = self._get_burnin_options() # Prepare basic data for processing @@ -204,9 +214,6 @@ class ExtractBurnin(publish.Extractor): # Args that will execute the script executable_args = ["run", scriptpath] - burnins_per_repres = self._get_burnins_per_representations( - instance, burnin_defs - ) for repre, repre_burnin_defs in burnins_per_repres: # Create copy of `_burnin_data` and `_temp_data` for repre. burnin_data = copy.deepcopy(_burnin_data) @@ -371,6 +378,7 @@ # Prepare subprocess arguments args = list(executable_args) args.append(temporary_json_filepath) + args.append("--headless") self.log.debug("Executing: {}".format(" ".join(args))) # Run burnin script @@ -540,7 +548,7 @@ return burnin_data, temp_data def repres_is_valid(self, repre): - """Validation if representaion should be processed. + """Validation if representation should be processed. Args: repre (dict): Representation which should be checked. @@ -572,7 +580,7 @@ tags (list): Tags of processed representation. Returns: - list: Containg all burnin definitions matching entered tags. + list: Containing all burnin definitions matching entered tags. """ filtered_burnins = [] @@ -597,7 +605,7 @@ Store data to `temp_data` for keys "full_input_path" which is full path to source files optionally with sequence formatting, - "full_output_path" full path to otput with optionally with sequence + "full_output_path" full path to output optionally with sequence formatting, "full_input_paths" list of all source files which will be deleted when burnin script ends, "repre_files" list of output filenames. @@ -747,7 +755,7 @@ profile (dict): Profile from presets matching current context. Returns: - list: Containg all valid output definitions. + list: Containing all valid output definitions. """ filtered_burnin_defs = [] @@ -768,7 +776,7 @@ ): self.log.debug(( "Skipped burnin definition \"{}\".
Family" - " fiters ({}) does not match current instance families: {}" + " filters ({}) does not match current instance families: {}" ).format( filename_suffix, str(families_filters), str(families) )) diff --git a/client/ayon_core/plugins/publish/extract_color_transcode.py b/client/ayon_core/plugins/publish/extract_color_transcode.py index b5ddebe05b..1130c575a3 100644 --- a/client/ayon_core/plugins/publish/extract_color_transcode.py +++ b/client/ayon_core/plugins/publish/extract_color_transcode.py @@ -257,6 +257,7 @@ class ExtractOIIOTranscode(publish.Extractor): return new_repre["ext"] = output_extension + new_repre["outputName"] = output_name renamed_files = [] for file_name in files_to_convert: diff --git a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py index 7ceaf7d2ad..60c92aa8b1 100644 --- a/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py +++ b/client/ayon_core/plugins/publish/extract_hierarchy_to_ayon.py @@ -4,12 +4,11 @@ import json import uuid import pyblish.api -from ayon_api import slugify_string +from ayon_api import slugify_string, get_folders, get_tasks from ayon_api.entity_hub import EntityHub -from ayon_core.client import get_assets, get_asset_name_identifier from ayon_core.pipeline.template_data import ( - get_asset_template_data, + get_folder_template_data, get_task_template_data, ) @@ -35,38 +34,74 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): self._fill_instance_entities(context, project_name) def _fill_instance_entities(self, context, project_name): - instances_by_asset_name = collections.defaultdict(list) + instances_by_folder_path = collections.defaultdict(list) for instance in context: if instance.data.get("publish") is False: continue - instance_entity = instance.data.get("assetEntity") + instance_entity = instance.data.get("folderEntity") if instance_entity: continue - # Skip if instance asset does not match - instance_asset_name = instance.data.get("folderPath") - instances_by_asset_name[instance_asset_name].append(instance) + folder_path = instance.data.get("folderPath") + instances_by_folder_path[folder_path].append(instance) - project_doc = context.data["projectEntity"] - asset_docs = get_assets( - project_name, asset_names=instances_by_asset_name.keys() + project_entity = context.data["projectEntity"] + folder_entities = get_folders( + project_name, folder_paths=instances_by_folder_path.keys() ) - asset_docs_by_name = { - get_asset_name_identifier(asset_doc): asset_doc - for asset_doc in asset_docs + folder_entities_by_path = { + folder_entity["path"]: folder_entity + for folder_entity in folder_entities } - for asset_name, instances in instances_by_asset_name.items(): - asset_doc = asset_docs_by_name[asset_name] - asset_data = get_asset_template_data(asset_doc, project_name) + all_task_names = set() + folder_ids = set() + # Fill folderEntity and prepare data for task entities + for folder_path, instances in instances_by_folder_path.items(): + folder_entity = folder_entities_by_path[folder_path] + folder_ids.add(folder_entity["id"]) for instance in instances: task_name = instance.data.get("task") - template_data = get_task_template_data( - project_doc, asset_doc, task_name) - template_data.update(copy.deepcopy(asset_data)) + all_task_names.add(task_name) + + # Query task entities + # Discard 'None' task names + all_task_names.discard(None) + tasks_by_name_by_folder_id = { + folder_id: {} for folder_id in folder_ids + } + task_entities = [] + if 
all_task_names: + task_entities = get_tasks( + project_name, + task_names=all_task_names, + folder_ids=folder_ids, + ) + for task_entity in task_entities: + task_name = task_entity["name"] + folder_id = task_entity["folderId"] + tasks_by_name_by_folder_id[folder_id][task_name] = task_entity + + for folder_path, instances in instances_by_folder_path.items(): + folder_entity = folder_entities_by_path[folder_path] + folder_id = folder_entity["id"] + folder_data = get_folder_template_data( + folder_entity, project_name + ) + task_entities_by_name = tasks_by_name_by_folder_id[folder_id] + for instance in instances: + task_name = instance.data.get("task") + task_entity = task_entities_by_name.get(task_name) + template_data = {} + if task_entity: + template_data = get_task_template_data( + project_entity, task_entity + ) + template_data.update(copy.deepcopy(folder_data)) instance.data["anatomyData"].update(template_data) - instance.data["assetEntity"] = asset_doc + instance.data["folderEntity"] = folder_entity + instance.data["taskEntity"] = task_entity def _create_hierarchy(self, context, project_name): hierarchy_context = self._filter_hierarchy(context) @@ -80,6 +115,10 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): entity_hub = EntityHub(project_name) project = entity_hub.project_entity + folder_type_name_by_low_name = { + folder_type_item["name"].lower(): folder_type_item["name"] + for folder_type_item in project.get_folder_types() + } hierarchy_match_queue = collections.deque() hierarchy_match_queue.append((project, hierarchy_context)) @@ -132,8 +171,18 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): # TODO check if existing entity have 'folder' type child_entity = children_by_low_name.get(child_name.lower()) if child_entity is None: + folder_type = folder_type_name_by_low_name.get( + child_info["folder_type"].lower() + ) + if folder_type is None: + # TODO add validator for folder type validations + self.log.warning(( + "Couldn't find folder type '{}'" + ).format(child_info["folder_type"])) + folder_type = "Folder" + child_entity = entity_hub.add_new_folder( - child_info["entity_type"], + folder_type, parent_id=entity.id, name=child_name ) @@ -157,7 +206,7 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): Output example: { "name": "MyProject", - "entity_type": "Project", + "entity_type": "project", "attributes": {}, "tasks": [], "children": [ @@ -188,12 +237,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): # filter only the active publishing instances active_folder_paths = set() for instance in context: - if instance.data.get("publish") is not False: + if instance.data.get("publish", True) is not False: active_folder_paths.add(instance.data.get("folderPath")) active_folder_paths.discard(None) - self.log.debug("Active folder paths: {}".format(active_folder_paths)) if not active_folder_paths: return None @@ -202,11 +250,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): hierarchy_context = copy.deepcopy(context.data["hierarchyContext"]) for key, value in hierarchy_context.items(): project_item = copy.deepcopy(value) - project_children_context = project_item.pop("childs", None) + project_children_context = project_item.pop("children", None) project_item["name"] = key project_item["tasks"] = [] project_item["attributes"] = project_item.pop( - "custom_attributes", {} + "attributes", {} ) project_item["children"] = [] @@ -230,22 +278,23 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin): folder_path = "{}/{}".format(parent_path, 
folder_name) if ( folder_path not in active_folder_paths - and not folder_info.get("childs") + and not folder_info.get("children") ): continue item_id = uuid.uuid4().hex new_item = copy.deepcopy(folder_info) + new_children_context = new_item.pop("children", None) + tasks = new_item.pop("tasks", {}) + new_item["name"] = folder_name new_item["children"] = [] - new_children_context = new_item.pop("childs", None) - tasks = new_item.pop("tasks", {}) task_items = [] for task_name, task_info in tasks.items(): task_info["name"] = task_name task_items.append(task_info) new_item["tasks"] = task_items - new_item["attributes"] = new_item.pop("custom_attributes", {}) + new_item["attributes"] = new_item.pop("attributes", {}) items_by_id[item_id] = new_item parent_id_by_item_id[item_id] = parent_id diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index a19b5b9090..98723beffa 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -80,7 +80,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin): # create duration duration = (timeline_out_h - timeline_in_h) + 1 - # ffmpeg generate new file only if doesnt exists already + # ffmpeg generates new file only if it doesn't exist already if not recycling_file: # convert to seconds start_sec = float(timeline_in_h / fps) diff --git a/client/ayon_core/plugins/publish/extract_review.py b/client/ayon_core/plugins/publish/extract_review.py index 905158c851..1891c25521 100644 --- a/client/ayon_core/plugins/publish/extract_review.py +++ b/client/ayon_core/plugins/publish/extract_review.py @@ -32,6 +32,35 @@ from ayon_core.pipeline.publish import ( from ayon_core.pipeline.publish.lib import add_repre_files_for_cleanup +def frame_to_timecode(frame: int, fps: float) -> str: + """Convert a frame number and FPS to editorial timecode (HH:MM:SS:FF). + + Unlike `ayon_core.pipeline.editorial.frames_to_timecode` this does not + rely on the `opentimelineio` package, so it can be used across hosts that + do not have it available. + + Args: + frame (int): The frame number to be converted. + fps (float): The frames per second of the video. + + Returns: + str: The timecode in HH:MM:SS:FF format. + """ + # Calculate total seconds + total_seconds = frame / fps + + # Extract hours, minutes, and seconds + hours = int(total_seconds // 3600) + minutes = int((total_seconds % 3600) // 60) + seconds = int(total_seconds % 60) + + # Adjust for non-integer FPS by rounding the remaining frames appropriately + remaining_frames = round((total_seconds - int(total_seconds)) * fps) + + # Format and return the timecode + return f"{hours:02d}:{minutes:02d}:{seconds:02d}:{remaining_frames:02d}" + + class ExtractReview(pyblish.api.InstancePlugin): """Extracting Review mov file for Ftrack @@ -390,7 +419,16 @@ class ExtractReview(pyblish.api.InstancePlugin): # add outputName to anatomy format fill_data fill_data.update({ "output": output_name, - "ext": output_ext + "ext": output_ext, + + # By adding `timecode` as data we can use it + # in the ffmpeg arguments for `--timecode` so that editorial software + # like Resolve or Premiere can detect the start frame for e.g.
+ # review output files + "timecode": frame_to_timecode( + frame=temp_data["frame_start_handle"], + fps=float(instance.data["fps"]) + ) }) try: # temporary until oiiotool is supported cross platform @@ -619,7 +657,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Prepare input and output filepaths self.input_output_paths(new_repre, output_def, temp_data) - # Set output frames len to 1 when ouput is single image + # Set output frames len to 1 when output is single image if ( temp_data["output_ext_is_image"] and not temp_data["output_is_sequence"] @@ -955,7 +993,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("New representation ext: `{}`".format(output_ext)) - # Output is image file sequence witht frames + # Output is image file sequence with frames output_ext_is_image = bool(output_ext in self.image_exts) output_is_sequence = bool( output_ext_is_image @@ -967,7 +1005,7 @@ class ExtractReview(pyblish.api.InstancePlugin): frame_end = temp_data["output_frame_end"] filename_base = "{}_{}".format(filename, filename_suffix) - # Temporary tempalte for frame filling. Example output: + # Temporary template for frame filling. Example output: # "basename.%04d.exr" when `frame_end` == 1001 repr_file = "{}.%{:0>2}d.{}".format( filename_base, len(str(frame_end)), output_ext @@ -997,7 +1035,7 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.debug("Creating dir: {}".format(dst_staging_dir)) os.makedirs(dst_staging_dir) - # Store stagingDir to representaion + # Store stagingDir to representation new_repre["stagingDir"] = dst_staging_dir # Store paths to temp data @@ -1225,19 +1263,13 @@ class ExtractReview(pyblish.api.InstancePlugin): filters = [] # if reformat input video file is already reforamted from upstream - reformat_in_baking = bool("reformated" in new_repre["tags"]) + reformat_in_baking = ( + "reformatted" in new_repre["tags"] + # Backwards compatibility + or "reformated" in new_repre["tags"] + ) self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking)) - # Get instance data - pixel_aspect = temp_data["pixel_aspect"] - - if reformat_in_baking: - self.log.debug(( - "Using resolution from input. It is already " - "reformated from upstream process" - )) - pixel_aspect = 1 - # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] try: @@ -1268,7 +1300,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if reformat_in_baking: self.log.debug(( "Using resolution from input. It is already " - "reformated from upstream process" + "reformatted from upstream process" )) pixel_aspect = 1 output_width = input_width @@ -1374,7 +1406,7 @@ class ExtractReview(pyblish.api.InstancePlugin): # Make sure output width and height is not an odd number # When this can happen: # - if output definition has set width and height with odd number - # - `instance.data` contain width and height with odd numbeer + # - `instance.data` contain width and height with odd number if output_width % 2 != 0: self.log.warning(( "Converting output width from odd to even number. {} -> {}" @@ -1555,7 +1587,7 @@ class ExtractReview(pyblish.api.InstancePlugin): custom_tags (list): Custom Tags of processed representation. Returns: - list: Containg all output definitions matching entered tags. + list: Containing all output definitions matching entered tags. 
""" filtered_outputs = [] @@ -1820,8 +1852,8 @@ class OverscanCrop: """ # crop=width:height:x:y - explicit start x, y position # crop=width:height - x, y are related to center by width/height - # pad=width:heigth:x:y - explicit start x, y position - # pad=width:heigth - x, y are set to 0 by default + # pad=width:height:x:y - explicit start x, y position + # pad=width:height - x, y are set to 0 by default width = self.width() height = self.height() @@ -1869,7 +1901,7 @@ class OverscanCrop: # Replace "px" (and spaces before) with single space string_value = re.sub(r"([ ]+)?px", " ", string_value) string_value = re.sub(r"([ ]+)%", "%", string_value) - # Make sure +/- sign at the beggining of string is next to number + # Make sure +/- sign at the beginning of string is next to number string_value = re.sub(r"^([\+\-])[ ]+", "\g<1>", string_value) # Make sure +/- sign in the middle has zero spaces before number under # which belongs diff --git a/client/ayon_core/plugins/publish/extract_slate_data.py b/client/ayon_core/plugins/publish/extract_slate_data.py new file mode 100644 index 0000000000..750fb5d60a --- /dev/null +++ b/client/ayon_core/plugins/publish/extract_slate_data.py @@ -0,0 +1,22 @@ +import pyblish.api + +from ayon_core.pipeline import publish + + +class ExtractSlateData(publish.Extractor): + """Add slate data for integration.""" + + label = "Slate Data" + # Offset from ExtractReviewSlate and ExtractGenerateSlate. + order = pyblish.api.ExtractorOrder + 0.49 + families = ["slate", "review"] + hosts = ["nuke", "shell"] + + def process(self, instance): + for representation in instance.data.get("representations", []): + if "slate-frame" not in representation.get("tags", []): + continue + + data = representation.get("data", {}) + data["slateFrames"] = 1 + representation["data"] = data diff --git a/client/ayon_core/plugins/publish/extract_thumbnail.py b/client/ayon_core/plugins/publish/extract_thumbnail.py index 84c1000ba3..d1b6e4e0cc 100644 --- a/client/ayon_core/plugins/publish/extract_thumbnail.py +++ b/client/ayon_core/plugins/publish/extract_thumbnail.py @@ -478,7 +478,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin): # Set video input attributes max_int = str(2147483647) video_data = get_ffprobe_data(video_file_path, logger=self.log) - duration = float(video_data["format"]["duration"]) + # Use duration of the individual streams since it is returned with + # higher decimal precision than 'format.duration'. We need this + # more precise value for calculating the correct amount of frames + # for higher FPS ranges or decimal ranges, e.g. 29.97 FPS + duration = max( + float(stream.get("duration", 0)) + for stream in video_data["streams"] + if stream.get("codec_type") == "video" + ) cmd_args = [ "-y", diff --git a/client/ayon_core/plugins/publish/help/validate_containers.xml b/client/ayon_core/plugins/publish/help/validate_containers.xml index 5d18bb4c19..321e73a303 100644 --- a/client/ayon_core/plugins/publish/help/validate_containers.xml +++ b/client/ayon_core/plugins/publish/help/validate_containers.xml @@ -10,7 +10,7 @@ Scene contains one or more outdated loaded containers, eg. versions loaded into ### How to repair? Use 'Scene Inventory' and update all highlighted old container to latest OR - refresh Publish and switch 'Validate Containers' toggle on 'Options' tab. +refresh Publish and switch 'Validate Containers' toggle on 'Context' tab. WARNING: Skipping this validator will result in publishing (and probably rendering) old version of loaded assets. 
diff --git a/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml b/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml index a4b289d848..e163fc39fe 100644 --- a/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml +++ b/client/ayon_core/plugins/publish/help/validate_unique_subsets.xml @@ -1,7 +1,7 @@ -Subset not unique +Product not unique ## Clashing product names found diff --git a/client/ayon_core/plugins/publish/integrate.py b/client/ayon_core/plugins/publish/integrate.py index 12c702c93b..764168edd3 100644 --- a/client/ayon_core/plugins/publish/integrate.py +++ b/client/ayon_core/plugins/publish/integrate.py @@ -2,27 +2,24 @@ import os import logging import sys import copy -import datetime import clique import six import pyblish.api - -from ayon_core.client.operations import ( - OperationsSession, - new_subset_document, - new_version_doc, - new_representation_doc, - prepare_subset_update_data, - prepare_version_update_data, - prepare_representation_update_data, -) - -from ayon_core.client import ( - get_representations, - get_subset_by_name, +from ayon_api import ( + get_attributes_for_type, + get_product_by_name, get_version_by_name, + get_representations, ) +from ayon_api.operations import ( + OperationsSession, + new_product_entity, + new_version_entity, + new_representation_entity, +) +from ayon_api.utils import create_entity_id + from ayon_core.lib import source_hash from ayon_core.lib.file_transaction import ( FileTransaction, @@ -36,6 +33,36 @@ from ayon_core.pipeline.publish import ( log = logging.getLogger(__name__) +def prepare_changes(old_entity, new_entity): + """Prepare changes for entity update. + + Args: + old_entity: Existing entity. + new_entity: New entity. + + Returns: + dict[str, Any]: Changes that have new entity. + + """ + changes = {} + for key in set(new_entity.keys()): + if key == "attrib": + continue + + if key in new_entity and new_entity[key] != old_entity.get(key): + changes[key] = new_entity[key] + continue + + attrib_changes = {} + if "attrib" in new_entity: + for key, value in new_entity["attrib"].items(): + if value != old_entity["attrib"].get(key): + attrib_changes[key] = value + if attrib_changes: + changes["attrib"] = attrib_changes + return changes + + def get_instance_families(instance): """Get all families of the instance""" # todo: move this to lib? 
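The `prepare_changes` helper above returns only the keys whose values actually differ between the two entities, and diffs `attrib` per key rather than replacing it wholesale, which keeps `update_entity` payloads minimal. A small illustration with made-up entity dicts (not real server payloads):

    old_entity = {
        "name": "renderMain",
        "attrib": {"fps": 24.0, "resolutionWidth": 1920},
    }
    new_entity = {
        "name": "renderMain",
        "productType": "render",
        "attrib": {"fps": 25.0, "resolutionWidth": 1920},
    }

    # "name" is unchanged and dropped; "attrib" keeps only the changed key.
    assert prepare_changes(old_entity, new_entity) == {
        "productType": "render",
        "attrib": {"fps": 25.0},
    }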
@@ -140,7 +167,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "uasset", "blendScene", "yeticacheUE", - "tycache" + "tycache", + "csv_ingest_file", ] default_template_name = "publish" @@ -164,7 +192,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ] def process(self, instance): - # Instance should be integrated on a farm if instance.data.get("farm"): self.log.debug( @@ -256,23 +283,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_name = self.get_template_name(instance) op_session = OperationsSession() - subset = self.prepare_subset( + product_entity = self.prepare_product( instance, op_session, project_name ) - version = self.prepare_version( - instance, op_session, subset, project_name + version_entity = self.prepare_version( + instance, op_session, product_entity, project_name ) - instance.data["versionEntity"] = version + instance.data["versionEntity"] = version_entity anatomy = instance.context.data["anatomy"] # Get existing representations (if any) existing_repres_by_name = { - repre_doc["name"].lower(): repre_doc - for repre_doc in get_representations( + repre_entity["name"].lower(): repre_entity + for repre_entity in get_representations( project_name, - version_ids=[version["_id"]], - fields=["_id", "name"] + version_ids=[version_entity["id"]] ) } @@ -284,7 +310,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): repre, template_name, existing_repres_by_name, - version, + version_entity, instance_stagingdir, instance) @@ -312,7 +338,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): resource_destinations.add(os.path.abspath(dst)) # Bulk write to the database - # We write the subset and version to the database before the File + # We write the product and version to the database before the File # Transaction to reduce the chances of another publish trying to # publish to the same version number since that chance can greatly # increase if the file transaction takes a long time. @@ -320,7 +346,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.info(( "Product '{}' version {} written to database.." 
- ).format(subset["name"], version["name"])) + ).format(product_entity["name"], version_entity["version"])) # Process all file transfers of all integrations now self.log.debug("Integrating source files to destination ...") @@ -331,58 +357,46 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "Transferred files: {}".format(file_transactions.transferred)) self.log.debug("Retrieving Representation Site Sync information ...") - # Get the accessible sites for Site Sync - addons_manager = instance.context.data["ayonAddonsManager"] - sync_server_addon = addons_manager.get("sync_server") - if sync_server_addon is None: - sites = [{ - "name": "studio", - "created_dt": datetime.datetime.now() - }] - else: - sites = sync_server_addon.compute_resource_sync_sites( - project_name=instance.data["projectEntity"]["name"] - ) - self.log.debug("Sync Server Sites: {}".format(sites)) - # Compute the resource file infos once (files belonging to the # version instance instead of an individual representation) so # we can re-use those file infos per representation - resource_file_infos = self.get_files_info(resource_destinations, - sites=sites, - anatomy=anatomy) + resource_file_infos = self.get_files_info( + resource_destinations, anatomy + ) # Finalize the representations now the published files are integrated # Get 'files' info for representations and its attached resources new_repre_names_low = set() for prepared in prepared_representations: - repre_doc = prepared["representation"] - repre_update_data = prepared["repre_doc_update_data"] + repre_entity = prepared["representation"] + repre_update_data = prepared["repre_update_data"] transfers = prepared["transfers"] destinations = [dst for src, dst in transfers] - repre_doc["files"] = self.get_files_info( - destinations, sites=sites, anatomy=anatomy + repre_files = self.get_files_info( + destinations, anatomy ) - # Add the version resource file infos to each representation - repre_doc["files"] += resource_file_infos + repre_files += resource_file_infos + repre_entity["files"] = repre_files # Set up representation for writing to the database. Since # we *might* be overwriting an existing entry if the version # already existed we'll use ReplaceOnce with `upsert=True` if repre_update_data is None: op_session.create_entity( - project_name, repre_doc["type"], repre_doc + project_name, "representation", repre_entity ) else: + # Add files to update data + repre_update_data["files"] = repre_files op_session.update_entity( project_name, - repre_doc["type"], - repre_doc["_id"], + "representation", + repre_entity["id"], repre_update_data ) - new_repre_names_low.add(repre_doc["name"].lower()) + new_repre_names_low.add(repre_entity["name"].lower()) # Delete any existing representations that didn't get any new data # if the instance is not set to append mode @@ -392,7 +406,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # We add the exact representation name because `name` is # lowercase for name matching only and not in the database op_session.delete_entity( - project_name, "representation", existing_repres["_id"] + project_name, "representation", existing_repres["id"] ) self.log.debug("{}".format(op_session.to_data())) @@ -401,7 +415,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Backwards compatibility used in hero integration. # todo: can we avoid the need to store this? 
         instance.data["published_representations"] = {
-            p["representation"]["_id"]: p for p in prepared_representations
+            p["representation"]["id"]: p
+            for p in prepared_representations
         }
 
         self.log.info(
@@ -412,108 +427,130 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             )
         )
 
-    def prepare_subset(self, instance, op_session, project_name):
-        asset_doc = instance.data["assetEntity"]
+    def prepare_product(self, instance, op_session, project_name):
+        folder_entity = instance.data["folderEntity"]
         product_name = instance.data["productName"]
         product_type = instance.data["productType"]
         self.log.debug("Product: {}".format(product_name))
 
-        # Get existing subset if it exists
-        existing_subset_doc = get_subset_by_name(
-            project_name, product_name, asset_doc["_id"]
+        # Get existing product if it exists
+        existing_product_entity = get_product_by_name(
+            project_name, product_name, folder_entity["id"]
         )
 
-        # Define subset data
+        # Define product data
         data = {
             "families": get_instance_families(instance)
         }
+        attributes = {}
 
-        subset_group = instance.data.get("subsetGroup")
-        if subset_group:
-            data["subsetGroup"] = subset_group
-        elif existing_subset_doc:
-            # Preserve previous subset group if new version does not set it
-            if "subsetGroup" in existing_subset_doc.get("data", {}):
-                subset_group = existing_subset_doc["data"]["subsetGroup"]
-                data["subsetGroup"] = subset_group
+        product_group = instance.data.get("productGroup")
+        if product_group:
+            attributes["productGroup"] = product_group
+        elif existing_product_entity:
+            # Preserve previous product group if new version does not set it
+            product_group = existing_product_entity.get("attrib", {}).get(
+                "productGroup"
+            )
+            if product_group is not None:
+                attributes["productGroup"] = product_group
 
-        subset_id = None
-        if existing_subset_doc:
-            subset_id = existing_subset_doc["_id"]
-        subset_doc = new_subset_document(
-            product_name, product_type, asset_doc["_id"], data, subset_id
+        product_id = None
+        if existing_product_entity:
+            product_id = existing_product_entity["id"]
+
+        product_entity = new_product_entity(
+            product_name,
+            product_type,
+            folder_entity["id"],
+            data=data,
+            attribs=attributes,
+            entity_id=product_id
         )
 
-        if existing_subset_doc is None:
-            # Create a new subset
+        if existing_product_entity is None:
+            # Create a new product
             self.log.info(
                 "Product '%s' not found, creating ..." % product_name
             )
             op_session.create_entity(
-                project_name, subset_doc["type"], subset_doc
+                project_name, "product", product_entity
             )
 
         else:
-            # Update existing subset data with new data and set in database.
-            # We also change the found subset in-place so we don't need to
-            # re-query the subset afterwards
-            subset_doc["data"].update(data)
-            update_data = prepare_subset_update_data(
-                existing_subset_doc, subset_doc
+            # Update existing product data with new data and set in database.
+ # We also change the found product in-place so we don't need to + # re-query the product afterwards + update_data = prepare_changes( + existing_product_entity, product_entity ) op_session.update_entity( project_name, - subset_doc["type"], - subset_doc["_id"], + "product", + product_entity["id"], update_data ) self.log.debug("Prepared product: {}".format(product_name)) - return subset_doc + return product_entity - def prepare_version(self, instance, op_session, subset_doc, project_name): + def prepare_version( + self, instance, op_session, product_entity, project_name + ): version_number = instance.data["version"] + task_id = None + task_entity = instance.data.get("taskEntity") + if task_entity: + task_id = task_entity["id"] existing_version = get_version_by_name( project_name, version_number, - subset_doc["_id"], - fields=["_id"] + product_entity["id"] ) version_id = None if existing_version: - version_id = existing_version["_id"] + version_id = existing_version["id"] - version_data = self.create_version_data(instance) - version_doc = new_version_doc( + all_version_data = self.create_version_data(instance) + version_data = {} + version_attributes = {} + attr_defs = self._get_attributes_for_type(instance.context, "version") + for key, value in all_version_data.items(): + if key in attr_defs: + version_attributes[key] = value + else: + version_data[key] = value + + version_entity = new_version_entity( version_number, - subset_doc["_id"], - version_data, - version_id + product_entity["id"], + task_id=task_id, + data=version_data, + attribs=version_attributes, + entity_id=version_id, ) if existing_version: self.log.debug("Updating existing version ...") - update_data = prepare_version_update_data( - existing_version, version_doc - ) + update_data = prepare_changes(existing_version, version_entity) op_session.update_entity( project_name, - version_doc["type"], - version_doc["_id"], + "version", + version_entity["id"], update_data ) else: self.log.debug("Creating new version ...") op_session.create_entity( - project_name, version_doc["type"], version_doc + project_name, "version", version_entity ) self.log.debug( - "Prepared version: v{0:03d}".format(version_doc["name"]) + "Prepared version: v{0:03d}".format(version_entity["version"]) ) - return version_doc + return version_entity def _validate_repre_files(self, files, is_sequence_representation): """Validate representation files before transfer preparation. 
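The attribute/data split in `prepare_version` above is driven by the server's attribute definitions: any key of the collected version data that matches a declared version attribute is sent as `attribs`, everything else stays in the free-form `data` field. A standalone sketch of that partitioning, with a made-up `attr_defs` standing in for the keys returned by `get_attributes_for_type("version")`:

    attr_defs = {"fps", "frameStart", "frameEnd"}  # illustrative names only

    all_version_data = {
        "fps": 24.0,
        "frameStart": 1001,
        "source": "{root}/work/sh010/scene_v012.ma",
    }

    version_attributes = {}
    version_data = {}
    for key, value in all_version_data.items():
        if key in attr_defs:
            version_attributes[key] = value
        else:
            version_data[key] = value

    assert version_attributes == {"fps": 24.0, "frameStart": 1001}
    assert version_data == {"source": "{root}/work/sh010/scene_v012.ma"}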
@@ -552,13 +589,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ", ".join([str(rem) for rem in remainders]) )) - def prepare_representation(self, repre, - template_name, - existing_repres_by_name, - version, - instance_stagingdir, - instance): - + def prepare_representation( + self, + repre, + template_name, + existing_repres_by_name, + version_entity, + instance_stagingdir, + instance + ): # pre-flight validations if repre["ext"].startswith("."): raise KnownPublishError(( @@ -581,7 +620,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data["ext"] = repre["ext"] # allow overwriting existing version - template_data["version"] = version["name"] + template_data["version"] = version_entity["version"] # add template data for colorspaceData if repre.get("colorspaceData"): @@ -627,8 +666,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Anatomy template name: {}".format(template_name)) anatomy = instance.context.data["anatomy"] - publish_template_category = anatomy.templates[template_name] - template = os.path.normpath(publish_template_category["path"]) + publish_template = anatomy.get_template_item("publish", template_name) + path_template_obj = publish_template["path"] + template = path_template_obj.template.replace("\\", "/") is_udim = bool(repre.get("udim")) @@ -660,7 +700,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # - template_data (Dict[str, Any]): source data used to fill template # - to add required data to 'repre_context' not used for # formatting - path_template_obj = anatomy.templates_obj[template_name]["path"] # Treat template with 'orignalBasename' in special way if "{originalBasename}" in template: @@ -715,9 +754,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): if not is_udim: # Change padding for frames if template has defined higher # padding. - template_padding = int( - publish_template_category["frame_padding"] - ) + template_padding = anatomy.templates_obj.frame_padding if template_padding > destination_padding: destination_padding = template_padding @@ -803,7 +840,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # todo: Are we sure the assumption each representation # ends up in the same folder is valid? 
if not instance.data.get("publishDir"): - template_obj = anatomy.templates_obj[template_name]["folder"] + template_obj = publish_template["directory"] template_filled = template_obj.format_strict(template_data) instance.data["publishDir"] = template_filled @@ -823,7 +860,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): existing = existing_repres_by_name.get(repre["name"].lower()) repre_id = None if existing: - repre_id = existing["_id"] + repre_id = existing["id"] # Store first transferred destination as published path data # - used primarily for reviews that are integrated to custom modules @@ -835,25 +872,37 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # todo: `repre` is not the actual `representation` entity # we should simplify/clarify difference between data above # and the actual representation entity for the database - data = repre.get("data", {}) - data.update({"path": published_path, "template": template}) + attr_defs = self._get_attributes_for_type( + instance.context, "representation" + ) + attributes = {"path": published_path, "template": template} + data = {"context": repre_context} + for key, value in repre.get("data", {}).items(): + if key in attr_defs: + attributes[key] = value + else: + data[key] = value # add colorspace data if any exists on representation if repre.get("colorspaceData"): data["colorspaceData"] = repre["colorspaceData"] - repre_doc = new_representation_doc( - repre["name"], version["_id"], repre_context, data, repre_id + repre_doc = new_representation_entity( + repre["name"], + version_entity["id"], + # files are filled afterwards + [], + data=data, + attribs=attributes, + entity_id=repre_id ) update_data = None if repre_id is not None: - update_data = prepare_representation_update_data( - existing, repre_doc - ) + update_data = prepare_changes(existing, repre_doc) return { "representation": repre_doc, - "repre_doc_update_data": update_data, + "repre_update_data": update_data, "anatomy_data": template_data, "transfers": transfers, # todo: avoid the need for 'published_files' used by Integrate Hero @@ -950,13 +999,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin): '{root}/MyProject1/Assets...' Args: - anatomy: anatomy part from instance - path: path (absolute) - Returns: - path: modified path if possible, or unmodified path - + warning logged - """ + anatomy (Anatomy): Project anatomy. + path (str): Absolute path. + Returns: + str: Path where root path is replaced by formatting string. + + """ success, rootless_path = anatomy.find_root_template_from_path(path) if success: path = rootless_path @@ -967,43 +1016,41 @@ class IntegrateAsset(pyblish.api.InstancePlugin): ).format(path)) return path - def get_files_info(self, destinations, sites, anatomy): + def get_files_info(self, filepaths, anatomy): """Prepare 'files' info portion for representations. Arguments: - destinations (list): List of transferred file destinations - sites (list): array of published locations - anatomy: anatomy part from instance - Returns: - output_resources: array of dictionaries to be added to 'files' key - in representation - """ + filepaths (Iterable[str]): List of transferred file paths. + anatomy (Anatomy): Project anatomy. + Returns: + list[dict[str, Any]]: Representation 'files' information. 
+ + """ file_infos = [] - for file_path in destinations: - file_info = self.prepare_file_info(file_path, anatomy, sites=sites) + for filepath in filepaths: + file_info = self.prepare_file_info(filepath, anatomy) file_infos.append(file_info) return file_infos - def prepare_file_info(self, path, anatomy, sites): + def prepare_file_info(self, path, anatomy): """ Prepare information for one file (asset or resource) Arguments: - path: destination url of published file - anatomy: anatomy part from instance - sites: array of published locations, - [ {'name':'studio', 'created_dt':date} by default - keys expected ['studio', 'site1', 'gdrive1'] + path (str): Destination url of published file. + anatomy (Anatomy): Project anatomy part from instance. Returns: - dict: file info dictionary - """ + dict[str, Any]: Representation file info dictionary. + """ return { + "id": create_entity_id(), + "name": os.path.basename(path), "path": self.get_rootless_path(anatomy, path), "size": os.path.getsize(path), "hash": source_hash(path), - "sites": sites + "hash_type": "op3", } def _validate_path_in_project_roots(self, anatomy, file_path): @@ -1012,10 +1059,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin): Used to check that published path belongs to project, eg. we are not trying to publish to local only folder. Args: - anatomy (Anatomy) - file_path (str) - Raises - (KnownPublishError) + anatomy (Anatomy): Project anatomy. + file_path (str): Filepath. + + Raises: + KnownPublishError: When failed to find root for the path. """ path = self.get_rootless_path(anatomy, file_path) if not path: @@ -1023,3 +1071,21 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "Destination path '{}' ".format(file_path) + "must be in project dir" )) + + def _get_attributes_for_type(self, context, entity_type): + return self._get_attributes_by_type(context)[entity_type] + + def _get_attributes_by_type(self, context): + attributes = context.data.get("ayonAttributes") + if attributes is None: + attributes = {} + for key in ( + "project", + "folder", + "product", + "version", + "representation", + ): + attributes[key] = get_attributes_for_type(key) + context.data["ayonAttributes"] = attributes + return attributes diff --git a/client/ayon_core/plugins/publish/integrate_hero_version.py b/client/ayon_core/plugins/publish/integrate_hero_version.py index c275f75118..8c36719b77 100644 --- a/client/ayon_core/plugins/publish/integrate_hero_version.py +++ b/client/ayon_core/plugins/publish/integrate_hero_version.py @@ -1,31 +1,57 @@ import os import copy -import clique import errno import shutil +import clique import pyblish.api - -from ayon_core.client import ( - get_version_by_id, - get_hero_version_by_subset_id, - get_archived_representations, - get_representations, -) -from ayon_core.client.operations import ( +import ayon_api +from ayon_api.operations import ( OperationsSession, - new_hero_version_doc, - prepare_hero_version_update_data, - prepare_representation_update_data, + new_version_entity, ) -from ayon_core.lib import create_hard_link -from ayon_core.pipeline import ( - schema +from ayon_api.utils import create_entity_id + +from ayon_core.lib import create_hard_link, source_hash +from ayon_core.pipeline.publish import ( + get_publish_template_name, + OptionalPyblishPluginMixin, ) -from ayon_core.pipeline.publish import get_publish_template_name -class IntegrateHeroVersion(pyblish.api.InstancePlugin): +def prepare_changes(old_entity, new_entity): + """Prepare changes for entity update. + + Args: + old_entity: Existing entity. 
+ new_entity: New entity. + + Returns: + dict[str, Any]: Changes that have new entity. + + """ + changes = {} + for key in set(new_entity.keys()): + if key == "attrib": + continue + + if key in new_entity and new_entity[key] != old_entity.get(key): + changes[key] = new_entity[key] + continue + + attrib_changes = {} + if "attrib" in new_entity: + for key, value in new_entity["attrib"].items(): + if value != old_entity["attrib"].get(key): + attrib_changes[key] = value + if attrib_changes: + changes["attrib"] = attrib_changes + return changes + + +class IntegrateHeroVersion( + OptionalPyblishPluginMixin, pyblish.api.InstancePlugin +): label = "Integrate Hero Version" # Must happen after IntegrateNew order = pyblish.api.IntegratorOrder + 0.1 @@ -63,9 +89,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): # permissions error on files (files were used or user didn't have perms) # *but all other plugins must be sucessfully completed - _default_template_name = "hero" - def process(self, instance): + if not self.is_active(instance.data): + return + self.log.debug( "--- Integration of Hero version for product `{}` begins.".format( instance.data["productName"] @@ -82,22 +109,17 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): project_name = anatomy.project_name template_key = self._get_template_key(project_name, instance) + hero_template = anatomy.get_template_item( + "hero", template_key, "path", default=None + ) - if template_key not in anatomy.templates: + if hero_template is None: self.log.warning(( "!!! Anatomy of project \"{}\" does not have set" " \"{}\" template key!" ).format(project_name, template_key)) return - if "path" not in anatomy.templates[template_key]: - self.log.warning(( - "!!! There is not set \"path\" template in \"{}\" anatomy" - " for project \"{}\"." - ).format(template_key, project_name)) - return - - hero_template = anatomy.templates[template_key]["path"] self.log.debug("`hero` template check was successful. `{}`".format( hero_template )) @@ -150,7 +172,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): )) return - if src_version_entity["name"] == 0: + if src_version_entity["version"] == 0: self.log.debug( "Version 0 cannot have hero version. Skipping." 
            )
@@ -200,39 +222,45 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
         old_version, old_repres = self.current_hero_ents(
             project_name, src_version_entity
         )
-
-        old_repres_by_name = {
-            repre["name"].lower(): repre for repre in old_repres
-        }
+        inactive_old_repres_by_name = {}
+        old_repres_by_name = {}
+        for repre in old_repres:
+            low_name = repre["name"].lower()
+            if repre["active"]:
+                old_repres_by_name[low_name] = repre
+            else:
+                inactive_old_repres_by_name[low_name] = repre
 
         op_session = OperationsSession()
 
         entity_id = None
         if old_version:
-            entity_id = old_version["_id"]
+            entity_id = old_version["id"]
 
-        new_hero_version = new_hero_version_doc(
-            src_version_entity["parent"],
-            copy.deepcopy(src_version_entity["data"]),
-            src_version_entity["name"],
-            entity_id=entity_id
+        new_hero_version = new_version_entity(
+            -src_version_entity["version"],
+            src_version_entity["productId"],
+            task_id=src_version_entity.get("taskId"),
+            data=copy.deepcopy(src_version_entity["data"]),
+            attribs=copy.deepcopy(src_version_entity["attrib"]),
+            entity_id=entity_id,
         )
 
         if old_version:
             self.log.debug("Replacing old hero version.")
-            update_data = prepare_hero_version_update_data(
+            update_data = prepare_changes(
                 old_version, new_hero_version
             )
             op_session.update_entity(
                 project_name,
-                new_hero_version["type"],
-                old_version["_id"],
+                "version",
+                old_version["id"],
                 update_data
             )
         else:
             self.log.debug("Creating first hero version.")
             op_session.create_entity(
-                project_name, new_hero_version["type"], new_hero_version
+                project_name, "version", new_hero_version
             )
 
         # Separate old representations into `to replace` and `to delete`
@@ -249,16 +277,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
         if old_repres_by_name:
             old_repres_to_delete = old_repres_by_name
 
-        archived_repres = list(get_archived_representations(
-            project_name,
-            # Check what is type of archived representation
-            version_ids=[new_hero_version["_id"]]
-        ))
-        archived_repres_by_name = {}
-        for repre in archived_repres:
-            repre_name_low = repre["name"].lower()
-            archived_repres_by_name[repre_name_low] = repre
-
         backup_hero_publish_dir = None
         if os.path.exists(hero_publish_dir):
             backup_hero_publish_dir = hero_publish_dir + ".BACKUP"
@@ -306,9 +324,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
                     "Could not create hero version because it is not"
                     " possible to replace current hero files."
)) + try: src_to_dst_file_paths = [] - path_template_obj = anatomy.templates_obj[template_key]["path"] + repre_integrate_data = [] + path_template_obj = anatomy.get_template_item( + "hero", template_key, "path" + ) for repre_info in published_repres.values(): # Skip if new repre does not have published repre files @@ -321,11 +343,9 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_data.pop("version", None) # Get filled path to repre context - template_filled = path_template_obj.format_strict(anatomy_data) - repre_data = { - "path": str(template_filled), - "template": hero_template - } + template_filled = path_template_obj.format_strict( + anatomy_data + ) repre_context = template_filled.used_values for key in self.db_representation_context_keys: value = anatomy_data.get(key) @@ -333,14 +353,19 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): repre_context[key] = value # Prepare new repre - repre = copy.deepcopy(repre_info["representation"]) - repre["parent"] = new_hero_version["_id"] - repre["context"] = repre_context - repre["data"] = repre_data - repre.pop("_id", None) + repre_entity = copy.deepcopy(repre_info["representation"]) + repre_entity.pop("id", None) + repre_entity["versionId"] = new_hero_version["id"] + repre_entity["context"] = repre_context + repre_entity["attrib"] = { + "path": str(template_filled), + "template": hero_template.template + } + dst_paths = [] # Prepare paths of source and destination files if len(published_files) == 1: + dst_paths.append(str(template_filled)) src_to_dst_file_paths.append( (published_files[0], template_filled) ) @@ -363,9 +388,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): anatomy_data ) head, tail = _template_filled.split(frame_splitter) - padding = int( - anatomy.templates[template_key]["frame_padding"] - ) + padding = anatomy.templates_obj.frame_padding dst_col = clique.Collection( head=head, padding=padding, tail=tail @@ -376,84 +399,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): src_to_dst_file_paths.append( (src_file, dst_file) ) + dst_paths.append(dst_file) - # replace original file name with hero name in repre doc - for index in range(len(repre.get("files"))): - file = repre.get("files")[index] - file_name = os.path.basename(file.get('path')) - for src_file, dst_file in src_to_dst_file_paths: - src_file_name = os.path.basename(src_file) - if src_file_name == file_name: - repre["files"][index]["path"] = self._update_path( - anatomy, repre["files"][index]["path"], - src_file, dst_file) - - repre["files"][index]["hash"] = self._update_hash( - repre["files"][index]["hash"], - src_file_name, dst_file - ) - - schema.validate(repre) - - repre_name_low = repre["name"].lower() - # Replace current representation - if repre_name_low in old_repres_to_replace: - old_repre = old_repres_to_replace.pop(repre_name_low) - - repre["_id"] = old_repre["_id"] - update_data = prepare_representation_update_data( - old_repre, repre) - - # Keep previously synchronized sites up-to-date - # by comparing old and new sites and adding old sites - # if missing in new ones - # Prepare all sites from all files in old representation - old_site_names = set() - for file_info in old_repre.get("files", []): - old_site_names |= { - site["name"] - for site in file_info["sites"] - } - - for file_info in update_data.get("files", []): - file_info.setdefault("sites", []) - file_info_site_names = { - site["name"] - for site in file_info["sites"] - } - for site_name in old_site_names: - if site_name not in file_info_site_names: 
-                            file_info["sites"].append({
-                                "name": site_name
-                            })
-
-                    op_session.update_entity(
-                        project_name,
-                        old_repre["type"],
-                        old_repre["_id"],
-                        update_data
-                    )
-
-                # Unarchive representation
-                elif repre_name_low in archived_repres_by_name:
-                    archived_repre = archived_repres_by_name.pop(
-                        repre_name_low
-                    )
-                    repre["_id"] = archived_repre["old_id"]
-                    update_data = prepare_representation_update_data(
-                        archived_repre, repre)
-                    op_session.update_entity(
-                        project_name,
-                        old_repre["type"],
-                        archived_repre["_id"],
-                        update_data
-                    )
-
-                # Create representation
-                else:
-                    repre.pop("_id", None)
-                    op_session.create_entity(project_name, "representation",
-                                             repre)
+                repre_integrate_data.append(
+                    (repre_entity, dst_paths)
+                )
 
         self.path_checks = []
 
@@ -466,28 +416,61 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
         for src_path, dst_path in other_file_paths_mapping:
             self.copy_file(src_path, dst_path)
 
-        # Archive not replaced old representations
-        for repre_name_low, repre in old_repres_to_delete.items():
-            # Replace archived representation (This is backup)
-            # - should not happen to have both repre and archived repre
-            if repre_name_low in archived_repres_by_name:
-                archived_repre = archived_repres_by_name.pop(
-                    repre_name_low
+        # Update prepared representation entity data with files
+        # and integrate it to the server.
+        # NOTE: This must happen with the files already on disk
+        # because the file hash is computed from them.
+        for repre_entity, dst_paths in repre_integrate_data:
+            repre_files = self.get_files_info(dst_paths, anatomy)
+            repre_entity["files"] = repre_files
+
+            repre_name_low = repre_entity["name"].lower()
+            # Replace current representation
+            if repre_name_low in old_repres_to_replace:
+                old_repre = old_repres_to_replace.pop(repre_name_low)
+
+                repre_entity["id"] = old_repre["id"]
+                update_data = prepare_changes(old_repre, repre_entity)
+
+                op_session.update_entity(
+                    project_name,
+                    "representation",
+                    old_repre["id"],
+                    update_data
                 )
-                changes = {"old_id": repre["_id"],
-                           "_id": archived_repre["_id"],
-                           "type": archived_repre["type"]}
-                op_session.update_entity(project_name,
-                                         archived_repre["type"],
-                                         archived_repre["_id"],
-                                         changes)
+            # Activate representation
+            elif repre_name_low in inactive_old_repres_by_name:
+                inactive_repre = inactive_old_repres_by_name.pop(
+                    repre_name_low
+                )
+                repre_entity["id"] = inactive_repre["id"]
+                update_data = prepare_changes(
+                    inactive_repre, repre_entity
+                )
+                op_session.update_entity(
+                    project_name,
+                    "representation",
+                    inactive_repre["id"],
+                    update_data
+                )
+
+            # Create representation
             else:
-                repre["old_id"] = repre.pop("_id")
-                repre["type"] = "archived_representation"
-                op_session.create_entity(project_name,
-                                         "archived_representation",
-                                         repre)
+                op_session.create_entity(
+                    project_name,
+                    "representation",
+                    repre_entity
+                )
+
+        # Deactivate representations that were not replaced
+        for repre in old_repres_to_delete.values():
+            op_session.update_entity(
+                project_name,
+                "representation",
+                repre["id"],
+                {"active": False}
+            )
 
         op_session.commit()
 
@@ -519,13 +502,42 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             instance.data["productName"]
         ))
 
-    def get_all_files_from_path(self, path):
-        files = []
-        for (dir_path, dir_names, file_names) in os.walk(path):
-            for file_name in file_names:
-                _path = os.path.join(dir_path, file_name)
-                files.append(_path)
-        return files
+    def get_files_info(self, filepaths, anatomy):
+        """Prepare 'files' info portion for representations.
+
+        Arguments:
+            filepaths (Iterable[str]): List of transferred file paths.
+ anatomy (Anatomy): Project anatomy. + + Returns: + list[dict[str, Any]]: Representation 'files' information. + + """ + file_infos = [] + for filepath in filepaths: + file_info = self.prepare_file_info(filepath, anatomy) + file_infos.append(file_info) + return file_infos + + def prepare_file_info(self, path, anatomy): + """ Prepare information for one file (asset or resource) + + Arguments: + path (str): Destination url of published file. + anatomy (Anatomy): Project anatomy part from instance. + + Returns: + dict[str, Any]: Representation file info dictionary. + + """ + return { + "id": create_entity_id(), + "name": os.path.basename(path), + "path": self.get_rootless_path(anatomy, path), + "size": os.path.getsize(path), + "hash": source_hash(path), + "hash_type": "op3", + } def get_publish_dir(self, instance, template_key): anatomy = instance.context.data["anatomy"] @@ -536,29 +548,12 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): "originalBasename": instance.data.get("originalBasename") }) - if "folder" in anatomy.templates[template_key]: - template_obj = anatomy.templates_obj[template_key]["folder"] - publish_folder = template_obj.format_strict(template_data) - else: - # This is for cases of Deprecated anatomy without `folder` - # TODO remove when all clients have solved this issue - self.log.warning(( - "Deprecation warning: Anatomy does not have set `folder`" - " key underneath `publish` (in global of for project `{}`)." - ).format(anatomy.project_name)) - # solve deprecated situation when `folder` key is not underneath - # `publish` anatomy - template_data.update({ - "frame": "FRAME_TEMP", - "representation": "TEMP" - }) - template_obj = anatomy.templates_obj[template_key]["path"] - file_path = template_obj.format_strict(template_data) - - # Directory - publish_folder = os.path.dirname(file_path) - - publish_folder = os.path.normpath(publish_folder) + template_obj = anatomy.get_template_item( + "hero", template_key, "directory" + ) + publish_folder = os.path.normpath( + template_obj.format_strict(template_data) + ) self.log.debug("hero publish dir: \"{}\"".format(publish_folder)) @@ -581,6 +576,33 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): logger=self.log ) + def get_rootless_path(self, anatomy, path): + """Returns, if possible, path without absolute portion from root + (eg. 'c:\' or '/opt/..') + + This information is platform dependent and shouldn't be captured. + Example: + 'c:/projects/MyProject1/Assets/publish...' > + '{root}/MyProject1/Assets...' + + Args: + anatomy (Anatomy): Project anatomy. + path (str): Absolute path. + + Returns: + str: Path where root path is replaced by formatting string. + + """ + success, rootless_path = anatomy.find_root_template_from_path(path) + if success: + path = rootless_path + else: + self.log.warning(( + "Could not find root path for remapping \"{}\"." + " This may cause issues on farm." 
+ ).format(path)) + return path + def copy_file(self, src_path, dst_path): # TODO check drives if are the same to check if cas hardlink dirname = os.path.dirname(dst_path) @@ -617,48 +639,25 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin): def version_from_representations(self, project_name, repres): for repre in repres: - version = get_version_by_id(project_name, repre["parent"]) + version = ayon_api.get_version_by_id( + project_name, repre["versionId"] + ) if version: return version def current_hero_ents(self, project_name, version): - hero_version = get_hero_version_by_subset_id( - project_name, version["parent"] + hero_version = ayon_api.get_hero_version_by_product_id( + project_name, version["productId"] ) if not hero_version: return (None, []) - hero_repres = list(get_representations( - project_name, version_ids=[hero_version["_id"]] + hero_repres = list(ayon_api.get_representations( + project_name, version_ids={hero_version["id"]} )) return (hero_version, hero_repres) - def _update_path(self, anatomy, path, src_file, dst_file): - """ - Replaces source path with new hero path - - 'path' contains original path with version, must be replaced with - 'hero' path (with 'hero' label and without version) - - Args: - anatomy (Anatomy) - to get rootless style of path - path (string) - path from DB - src_file (string) - original file path - dst_file (string) - hero file path - """ - _, rootless = anatomy.find_root_template_from_path(dst_file) - _, rtls_src = anatomy.find_root_template_from_path(src_file) - return path.replace(rtls_src, rootless) - - def _update_hash(self, hash, src_file_name, dst_file): - """ - Updates hash value with proper hero name - """ - src_file_name = self._get_name_without_ext(src_file_name) - hero_file_name = self._get_name_without_ext(dst_file) - return hash.replace(src_file_name, hero_file_name) - def _get_name_without_ext(self, value): file_name = os.path.basename(value) file_name, _ = os.path.splitext(file_name) diff --git a/client/ayon_core/plugins/publish/integrate_inputlinks.py b/client/ayon_core/plugins/publish/integrate_inputlinks.py index f7e802f410..16aef09a39 100644 --- a/client/ayon_core/plugins/publish/integrate_inputlinks.py +++ b/client/ayon_core/plugins/publish/integrate_inputlinks.py @@ -55,8 +55,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): if not instance.data.get("publish", True): continue - version_doc = instance.data.get("versionEntity") - if not version_doc: + if not instance.data.get("versionEntity"): self.log.debug( "Instance {} doesn't have version.".format(instance)) continue @@ -88,14 +87,14 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): self.log.warn("No workfile in this publish session.") return - workfile_version_id = workfile_instance.data["versionEntity"]["_id"] + workfile_version_id = workfile_instance.data["versionEntity"]["id"] # link workfile to all publishing versions for instance in other_instances: self.add_link( new_links_by_type, "generative", workfile_version_id, - instance.data["versionEntity"]["_id"], + instance.data["versionEntity"]["id"], ) loaded_versions = workfile_instance.context.get("loadedVersions") @@ -123,7 +122,7 @@ class IntegrateInputLinksAYON(pyblish.api.ContextPlugin): new_links_by_type, "generative", input_version, - version_entity["_id"], + version_entity["id"], ) def _get_existing_links(self, project_name, link_type, entity_ids): diff --git a/client/ayon_core/plugins/publish/integrate_product_group.py b/client/ayon_core/plugins/publish/integrate_product_group.py 
index f69e7744d9..90887a359d 100644 --- a/client/ayon_core/plugins/publish/integrate_product_group.py +++ b/client/ayon_core/plugins/publish/integrate_product_group.py @@ -1,10 +1,10 @@ -"""Produces instance.data["subsetGroup"] data used during integration. +"""Produces instance.data["productGroup"] data used during integration. Requires: dict -> context["anatomyData"] *(pyblish.api.CollectorOrder + 0.49) Provides: - instance -> subsetGroup (str) + instance -> productGroup (str) """ import pyblish.api @@ -18,7 +18,7 @@ from ayon_core.lib import ( class IntegrateProductGroup(pyblish.api.InstancePlugin): - """Integrate Subset Group for publish.""" + """Integrate Product Group for publish.""" # Run after CollectAnatomyInstanceData order = pyblish.api.IntegratorOrder - 0.1 @@ -37,11 +37,11 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin): if not self.product_grouping_profiles: return - if instance.data.get("subsetGroup"): - # If subsetGroup is already set then allow that value to remain + if instance.data.get("productGroup"): + # If productGroup is already set then allow that value to remain self.log.debug(( "Skipping collect product group due to existing value: {}" - ).format(instance.data["subsetGroup"])) + ).format(instance.data["productGroup"])) return # Skip if there is no matching profile @@ -79,11 +79,11 @@ class IntegrateProductGroup(pyblish.api.InstancePlugin): except (KeyError, TemplateUnsolved): keys = fill_pairs.keys() self.log.warning(( - "Subset grouping failed. Only {} are expected in Settings" + "Product grouping failed. Only {} are expected in Settings" ).format(','.join(keys))) if filled_template: - instance.data["subsetGroup"] = filled_template + instance.data["productGroup"] = filled_template def get_profile_filter_criteria(self, instance): """Return filter criteria for `filter_profiles`""" diff --git a/client/ayon_core/plugins/publish/integrate_thumbnail.py b/client/ayon_core/plugins/publish/integrate_thumbnail.py index 9eb649d5a0..ca32e60cc2 100644 --- a/client/ayon_core/plugins/publish/integrate_thumbnail.py +++ b/client/ayon_core/plugins/publish/integrate_thumbnail.py @@ -26,9 +26,8 @@ import os import collections import pyblish.api - -from ayon_core.client import get_versions -from ayon_core.client.operations import OperationsSession +import ayon_api +from ayon_api.operations import OperationsSession InstanceFilterResult = collections.namedtuple( "InstanceFilterResult", @@ -59,20 +58,20 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): for instance_items in filtered_instance_items } # Query versions - version_docs = get_versions( + version_entities = ayon_api.get_versions( project_name, version_ids=version_ids, hero=True, - fields=["_id", "type", "name"] + fields={"id", "version"} ) # Store version by their id (converted to string) - version_docs_by_str_id = { - str(version_doc["_id"]): version_doc - for version_doc in version_docs + version_entities_by_id = { + version_entity["id"]: version_entity + for version_entity in version_entities } self._integrate_thumbnails( filtered_instance_items, - version_docs_by_str_id, + version_entities_by_id, project_name ) @@ -84,6 +83,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): ) filtered_instances = [] + anatomy = context.data["anatomy"] for instance in context: instance_label = self._get_instance_label(instance) # Skip instances without published representations @@ -99,7 +99,9 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): # Find thumbnail path on instance thumbnail_path = ( 
instance.data.get("thumbnailPath") - or self._get_instance_thumbnail_path(published_repres) + or self._get_instance_thumbnail_path( + published_repres, anatomy + ) ) if thumbnail_path: self.log.debug(( @@ -132,70 +134,72 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): def _get_version_id(self, published_representations): for repre_info in published_representations.values(): - return repre_info["representation"]["parent"] + return repre_info["representation"]["versionId"] - def _get_instance_thumbnail_path(self, published_representations): - thumb_repre_doc = None + def _get_instance_thumbnail_path( + self, published_representations, anatomy + ): + thumb_repre_entity = None for repre_info in published_representations.values(): - repre_doc = repre_info["representation"] - if "thumbnail" in repre_doc["name"].lower(): - thumb_repre_doc = repre_doc + repre_entity = repre_info["representation"] + if "thumbnail" in repre_entity["name"].lower(): + thumb_repre_entity = repre_entity break - if thumb_repre_doc is None: + if thumb_repre_entity is None: self.log.debug( "There is no representation with name \"thumbnail\"" ) return None - path = thumb_repre_doc["data"]["path"] - if not os.path.exists(path): + path = thumb_repre_entity["attrib"]["path"] + filled_path = anatomy.fill_root(path) + if not os.path.exists(filled_path): self.log.warning( - "Thumbnail file cannot be found. Path: {}".format(path) + "Thumbnail file cannot be found. Path: {}".format(filled_path) ) return None - return os.path.normpath(path) + return os.path.normpath(filled_path) def _integrate_thumbnails( self, filtered_instance_items, - version_docs_by_str_id, + version_entities_by_id, project_name ): - from ayon_core.client.operations import create_thumbnail - # Make sure each entity id has defined only one thumbnail id thumbnail_info_by_entity_id = {} for instance_item in filtered_instance_items: instance, thumbnail_path, version_id = instance_item instance_label = self._get_instance_label(instance) - version_doc = version_docs_by_str_id.get(version_id) - if not version_doc: + version_entity = version_entities_by_id.get(version_id) + if not version_entity: self.log.warning(( "Version entity for instance \"{}\" was not found." 
).format(instance_label)) continue - thumbnail_id = create_thumbnail(project_name, thumbnail_path) + thumbnail_id = ayon_api.create_thumbnail( + project_name, thumbnail_path + ) # Set thumbnail id for version thumbnail_info_by_entity_id[version_id] = { "thumbnail_id": thumbnail_id, - "entity_type": version_doc["type"], + "entity_type": "version", } - if version_doc["type"] == "hero_version": + version_name = version_entity["version"] + if version_name < 0: version_name = "Hero" - else: - version_name = version_doc["name"] self.log.debug("Setting thumbnail for version \"{}\" <{}>".format( version_name, version_id )) - asset_entity = instance.data["assetEntity"] + folder_id = instance.data["folderEntity"]["id"] folder_path = instance.data["folderPath"] - thumbnail_info_by_entity_id[asset_entity["_id"]] = { + thumbnail_info_by_entity_id[folder_id] = { "thumbnail_id": thumbnail_id, - "entity_type": "asset", + "entity_type": "folder", } self.log.debug("Setting thumbnail for folder \"{}\" <{}>".format( folder_path, version_id @@ -208,7 +212,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin): project_name, thumbnail_info["entity_type"], entity_id, - {"data.thumbnail_id": thumbnail_id} + {"thumbnailId": thumbnail_id} ) op_session.commit() diff --git a/client/ayon_core/plugins/publish/integrate_version_attrs.py b/client/ayon_core/plugins/publish/integrate_version_attrs.py index bc09af9db0..eadf4a194c 100644 --- a/client/ayon_core/plugins/publish/integrate_version_attrs.py +++ b/client/ayon_core/plugins/publish/integrate_version_attrs.py @@ -1,7 +1,6 @@ import pyblish.api import ayon_api - -from ayon_core.client.operations import OperationsSession +from ayon_api.operations import OperationsSession class IntegrateVersionAttributes(pyblish.api.ContextPlugin): @@ -33,6 +32,8 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin): version_entity = instance.data.get("versionEntity") if not version_entity: continue + + current_attributes = version_entity["attrib"] attributes = instance.data.get("versionAttributes") if not attributes: self.log.debug(( @@ -45,7 +46,7 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin): for attr, value in attributes.items(): if attr not in available_attributes: skipped_attributes.add(attr) - else: + elif current_attributes.get(attr) != value: filtered_attributes[attr] = value if not filtered_attributes: @@ -56,12 +57,12 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin): continue self.log.debug("Updating attributes on version {} to {}".format( - version_entity["_id"], str(filtered_attributes) + version_entity["id"], str(filtered_attributes) )) op_session.update_entity( project_name, "version", - version_entity["_id"], + version_entity["id"], {"attrib": filtered_attributes} ) diff --git a/client/ayon_core/plugins/publish/validate_asset_docs.py b/client/ayon_core/plugins/publish/validate_asset_docs.py index 22d957f6e2..95fe4252be 100644 --- a/client/ayon_core/plugins/publish/validate_asset_docs.py +++ b/client/ayon_core/plugins/publish/validate_asset_docs.py @@ -2,27 +2,27 @@ import pyblish.api from ayon_core.pipeline import PublishValidationError -class ValidateAssetDocs(pyblish.api.InstancePlugin): - """Validate existence of asset documents on instances. +class ValidateFolderEntities(pyblish.api.InstancePlugin): + """Validate existence of folder entity on instances. - Without asset document it is not possible to publish the instance. + Without folder entity it is not possible to publish the instance. 
-    If context has set asset document the validation is skipped.
+    If context has set folder entity the validation is skipped.
 
-    Plugin was added because there are cases when context asset is not defined
-    e.g. in tray publisher.
+    Plugin was added because there are cases when context folder is not
+    defined e.g. in tray publisher.
     """
 
-    label = "Validate Asset docs"
+    label = "Validate Folder entities"
     order = pyblish.api.ValidatorOrder
 
     def process(self, instance):
-        context_asset_doc = instance.context.data.get("assetEntity")
-        if context_asset_doc:
+        context_folder_entity = instance.context.data.get("folderEntity")
+        if context_folder_entity:
             return
 
-        if instance.data.get("assetEntity"):
-            self.log.debug("Instance has set asset document in its data.")
+        if instance.data.get("folderEntity"):
+            self.log.debug("Instance has set folder entity in its data.")
 
         elif instance.data.get("newAssetPublishing"):
             # skip if it is editorial
@@ -30,6 +30,6 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin):
 
         else:
             raise PublishValidationError((
-                "Instance \"{}\" doesn't have asset document "
+                "Instance \"{}\" doesn't have folder entity "
                 "set which is needed for publishing."
             ).format(instance.data["name"]))
diff --git a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py b/client/ayon_core/plugins/publish/validate_editorial_asset_name.py
deleted file mode 100644
index dd1a19f602..0000000000
--- a/client/ayon_core/plugins/publish/validate_editorial_asset_name.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from pprint import pformat
-
-import pyblish.api
-
-from ayon_core.client import get_assets, get_asset_name_identifier
-
-
-class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
-    """ Validating if editorial's asset names are not already created in db.
-
-    Checking variations of names with different size of caps or with
-    or without underscores.
- """ - - order = pyblish.api.ValidatorOrder - label = "Validate Editorial Asset Name" - hosts = [ - "hiero", - "resolve", - "flame", - "traypublisher" - ] - - def process(self, context): - - asset_and_parents = self.get_parents(context) - self.log.debug("__ asset_and_parents: {}".format(asset_and_parents)) - - project_name = context.data["projectName"] - db_assets = list(get_assets( - project_name, fields=["name", "data.parents"] - )) - self.log.debug("__ db_assets: {}".format(db_assets)) - - asset_db_docs = { - get_asset_name_identifier(asset_doc): list( - asset_doc["data"]["parents"] - ) - for asset_doc in db_assets - } - - self.log.debug("__ project_entities: {}".format( - pformat(asset_db_docs))) - - assets_missing_name = {} - assets_wrong_parent = {} - for asset in asset_and_parents.keys(): - if asset not in asset_db_docs.keys(): - # add to some nonexistent list for next layer of check - assets_missing_name[asset] = asset_and_parents[asset] - continue - - if asset_and_parents[asset] != asset_db_docs[asset]: - # add to some nonexistent list for next layer of check - assets_wrong_parent[asset] = { - "required": asset_and_parents[asset], - "already_in_db": asset_db_docs[asset] - } - continue - - self.log.debug("correct asset: {}".format(asset)) - - if assets_missing_name: - wrong_names = {} - self.log.debug( - ">> assets_missing_name: {}".format(assets_missing_name)) - - # This will create set asset names - asset_names = { - a.lower().replace("_", "") for a in asset_db_docs - } - - for asset in assets_missing_name: - _asset = asset.lower().replace("_", "") - if _asset in asset_names: - wrong_names[asset].update( - { - "required_name": asset, - "used_variants_in_db": [ - a for a in asset_db_docs - if a.lower().replace("_", "") == _asset - ] - } - ) - - if wrong_names: - self.log.debug( - ">> wrong_names: {}".format(wrong_names)) - raise Exception( - "Some already existing asset name variants `{}`".format( - wrong_names)) - - if assets_wrong_parent: - self.log.debug( - ">> assets_wrong_parent: {}".format(assets_wrong_parent)) - raise Exception( - "Wrong parents on assets `{}`".format(assets_wrong_parent)) - - def _get_all_assets(self, input_dict): - """ Returns asset names in list. 
- - List contains all asset names including parents - """ - for key in input_dict.keys(): - # check if child key is available - if input_dict[key].get("childs"): - # loop deeper - self._get_all_assets( - input_dict[key]["childs"]) - else: - self.all_testing_assets.append(key) - - def get_parents(self, context): - return_dict = {} - for instance in context: - asset = instance.data["folderPath"] - families = instance.data.get("families", []) + [ - instance.data["family"] - ] - # filter out non-shot families - if "shot" not in families: - continue - - parents = instance.data["parents"] - - return_dict[asset] = [ - str(p["entity_name"]) for p in parents - if p["entity_type"].lower() != "project" - ] - return return_dict diff --git a/client/ayon_core/plugins/publish/validate_unique_subsets.py b/client/ayon_core/plugins/publish/validate_unique_subsets.py index 3144675c50..4badeb8112 100644 --- a/client/ayon_core/plugins/publish/validate_unique_subsets.py +++ b/client/ayon_core/plugins/publish/validate_unique_subsets.py @@ -1,15 +1,17 @@ from collections import defaultdict + import pyblish.api + from ayon_core.pipeline.publish import ( PublishXmlValidationError, ) -class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): +class ValidateProductUniqueness(pyblish.api.ContextPlugin): """Validate all product names are unique. This only validates whether the instances currently set to publish from - the workfile overlap one another for the asset + product they are publishing + the workfile overlap one another for the folder + product they are publishing to. This does not perform any check against existing publishes in the database @@ -17,28 +19,28 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): versioning. A product may appear twice to publish from the workfile if one - of them is set to publish to another asset than the other. + of them is set to publish to another folder than the other. 
""" - label = "Validate Subset Uniqueness" + label = "Validate Product Uniqueness" order = pyblish.api.ValidatorOrder families = ["*"] def process(self, context): - # Find instance per (asset,product) - instance_per_asset_product = defaultdict(list) + # Find instance per (folder,product) + instance_per_folder_product = defaultdict(list) for instance in context: # Ignore disabled instances if not instance.data.get('publish', True): continue - # Ignore instance without asset data - asset = instance.data.get("folderPath") - if asset is None: - self.log.warning("Instance found without `asset` data: " + # Ignore instance without folder data + folder_path = instance.data.get("folderPath") + if folder_path is None: + self.log.warning("Instance found without `folderPath` data: " "{}".format(instance.name)) continue @@ -50,16 +52,21 @@ class ValidateSubsetUniqueness(pyblish.api.ContextPlugin): ).format(instance.name)) continue - instance_per_asset_product[(asset, product_name)].append(instance) + instance_per_folder_product[(folder_path, product_name)].append( + instance + ) non_unique = [] - for (asset, product_name), instances in instance_per_asset_product.items(): - - # A single instance per asset, product is fine + for (folder_path, product_name), instances in ( + instance_per_folder_product.items() + ): + # A single instance per folder, product is fine if len(instances) < 2: continue - non_unique.append("{} > {}".format(asset, product_name)) + non_unique.append( + "{} > {}".format(folder_path, product_name) + ) if not non_unique: # All is ok diff --git a/client/ayon_core/resources/app_icons/zbrush.png b/client/ayon_core/resources/app_icons/zbrush.png new file mode 100644 index 0000000000..4b0662580c Binary files /dev/null and b/client/ayon_core/resources/app_icons/zbrush.png differ diff --git a/client/ayon_core/scripts/slates/slate_base/api.py b/client/ayon_core/scripts/slates/slate_base/api.py index cd64c68134..d1b4b22979 100644 --- a/client/ayon_core/scripts/slates/slate_base/api.py +++ b/client/ayon_core/scripts/slates/slate_base/api.py @@ -13,3 +13,21 @@ from .items import ( ) from .lib import create_slates from .example import example + + +__all__ = ( + "FontFactory", + "BaseObj", + "load_default_style", + "MainFrame", + "Layer", + "BaseItem", + "ItemImage", + "ItemRectangle", + "ItemPlaceHolder", + "ItemText", + "ItemTable", + "TableField", + "create_slates", + "example", +) diff --git a/client/ayon_core/scripts/slates/slate_base/base.py b/client/ayon_core/scripts/slates/slate_base/base.py index 35ef46769c..e1648c916a 100644 --- a/client/ayon_core/scripts/slates/slate_base/base.py +++ b/client/ayon_core/scripts/slates/slate_base/base.py @@ -82,20 +82,6 @@ class BaseObj: def main_style(self): return load_default_style() - def height(self): - raise NotImplementedError( - "Attribute `height` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - - def width(self): - raise NotImplementedError( - "Attribute `width` is not implemented for <{}>".format( - self.__clas__.__name__ - ) - ) - def collect_data(self): return None diff --git a/client/ayon_core/settings/lib.py b/client/ayon_core/settings/lib.py index 69525d5b86..3929818d31 100644 --- a/client/ayon_core/settings/lib.py +++ b/client/ayon_core/settings/lib.py @@ -5,7 +5,7 @@ import collections import copy import time -from ayon_core.client import get_ayon_server_api_connection +import ayon_api log = logging.getLogger(__name__) @@ -46,8 +46,7 @@ class _AyonSettingsCache: @classmethod def _use_bundles(cls): if 
_AyonSettingsCache.use_bundles is None: - con = get_ayon_server_api_connection() - major, minor, _, _, _ = con.get_server_version_tuple() + major, minor, _, _, _ = ayon_api.get_server_version_tuple() use_bundles = True if (major, minor) < (0, 3): use_bundles = False @@ -69,8 +68,7 @@ class _AyonSettingsCache: _AyonSettingsCache.variant = variant # Set the variant to global ayon api connection - con = get_ayon_server_api_connection() - con.set_default_settings_variant(variant) + ayon_api.set_default_settings_variant(variant) return _AyonSettingsCache.variant @classmethod @@ -81,23 +79,21 @@ class _AyonSettingsCache: def get_value_by_project(cls, project_name): cache_item = _AyonSettingsCache.cache_by_project_name[project_name] if cache_item.is_outdated: - con = get_ayon_server_api_connection() if cls._use_bundles(): - value = con.get_addons_settings( + value = ayon_api.get_addons_settings( bundle_name=cls._get_bundle_name(), project_name=project_name, variant=cls._get_variant() ) else: - value = con.get_addons_settings(project_name) + value = ayon_api.get_addons_settings(project_name) cache_item.update_value(value) return cache_item.get_value() @classmethod def _get_addon_versions_from_bundle(cls): - con = get_ayon_server_api_connection() expected_bundle = cls._get_bundle_name() - bundles = con.get_bundles()["bundles"] + bundles = ayon_api.get_bundles()["bundles"] bundle = next( ( bundle @@ -117,8 +113,7 @@ class _AyonSettingsCache: if cls._use_bundles(): addons = cls._get_addon_versions_from_bundle() else: - con = get_ayon_server_api_connection() - settings_data = con.get_addons_settings( + settings_data = ayon_api.get_addons_settings( only_values=False, variant=cls._get_variant() ) @@ -206,7 +201,7 @@ def get_current_project_settings(): Project name should be stored in environment variable `AYON_PROJECT_NAME`. This function should be used only in host context where environment variable must be set and should not happen that any part of process will - change the value of the enviornment variable. + change the value of the environment variable. """ project_name = os.environ.get("AYON_PROJECT_NAME") if not project_name: @@ -214,6 +209,3 @@ def get_current_project_settings(): "Missing context project in environemt variable `AYON_PROJECT_NAME`." 
) return get_project_settings(project_name) - - - diff --git a/client/ayon_core/style/style.css b/client/ayon_core/style/style.css index fcc76b0bff..607fd1fa31 100644 --- a/client/ayon_core/style/style.css +++ b/client/ayon_core/style/style.css @@ -1067,6 +1067,38 @@ PixmapButton:disabled { font-size: 13pt; } +#PublisherVerticalScrollArea QScrollBar { + background: transparent; + margin: 0; + border: none; +} + +#PublisherVerticalScrollArea QScrollBar:horizontal { + height: 10px; + margin: 0; +} + +#PublisherVerticalScrollArea QScrollBar:vertical { + width: 10px; + margin: 0; +} + +#PublisherVerticalScrollArea QScrollBar::handle { + background: {color:bg-scroll-handle}; + border-radius: 4px; + margin: 1px; +} + +#PublisherVerticalScrollArea QScrollBar::handle:horizontal { + min-width: 20px; + min-height: 8px; +} + +#PublisherVerticalScrollArea QScrollBar::handle:vertical { + min-height: 20px; + min-width: 8px; +} + ValidationArtistMessage QLabel { font-size: 20pt; font-weight: bold; diff --git a/client/ayon_core/tools/adobe_webserver/app.py b/client/ayon_core/tools/adobe_webserver/app.py index b10509f484..7d97d7d66d 100644 --- a/client/ayon_core/tools/adobe_webserver/app.py +++ b/client/ayon_core/tools/adobe_webserver/app.py @@ -81,15 +81,19 @@ class WebServerTool: await client.connect() context = get_global_context() - project = context["project_name"] - asset = context["folder_path"] - task = context["task_name"] - log.info("Sending context change to {}-{}-{}".format(project, - asset, - task)) + project_name = context["project_name"] + folder_path = context["folder_path"] + task_name = context["task_name"] + log.info("Sending context change to {}{}/{}".format( + project_name, folder_path, task_name + )) - await client.call('{}.set_context'.format(host), - project=project, asset=asset, task=task) + await client.call( + '{}.set_context'.format(host), + project=project_name, + folder=folder_path, + task=task_name + ) await client.close() def port_occupied(self, host_name, port): diff --git a/client/ayon_core/tools/assetlinks/__init__.py b/client/ayon_core/tools/assetlinks/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/tools/assetlinks/widgets.py b/client/ayon_core/tools/assetlinks/widgets.py deleted file mode 100644 index 7db6243358..0000000000 --- a/client/ayon_core/tools/assetlinks/widgets.py +++ /dev/null @@ -1,155 +0,0 @@ -import collections -from ayon_core.client import ( - get_versions, - get_subsets, - get_assets, - get_output_link_versions, -) - -from qtpy import QtWidgets - - -class SimpleLinkView(QtWidgets.QWidget): - def __init__(self, dbcon, parent): - super(SimpleLinkView, self).__init__(parent=parent) - self.dbcon = dbcon - - # TODO: display selected target - - in_text = QtWidgets.QLabel("Inputs") - in_view = QtWidgets.QListWidget(parent=self) - out_text = QtWidgets.QLabel("Outputs") - out_view = QtWidgets.QListWidget(parent=self) - - layout = QtWidgets.QGridLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(in_text, 0, 0) - layout.addWidget(in_view, 1, 0) - layout.addWidget(out_text, 0, 1) - layout.addWidget(out_view, 1, 1) - - self._in_view = in_view - self._out_view = out_view - self._version_doc_to_process = None - - @property - def project_name(self): - return self.dbcon.current_project() - - def clear(self): - self._in_view.clear() - self._out_view.clear() - - def set_version(self, version_doc): - self.clear() - self._version_doc_to_process = version_doc - if version_doc and self.isVisible(): - 
self._fill_values() - - def showEvent(self, event): - super(SimpleLinkView, self).showEvent(event) - self._fill_values() - - def _fill_values(self): - if self._version_doc_to_process is None: - return - version_doc = self._version_doc_to_process - self._version_doc_to_process = None - self._fill_inputs(version_doc) - self._fill_outputs(version_doc) - - def _fill_inputs(self, version_doc): - version_ids = set() - for link in version_doc["data"].get("inputLinks", []): - # Backwards compatibility for "input" key used as "id" - if "id" not in link: - link_id = link["input"] - else: - link_id = link["id"] - version_ids.add(link_id) - - version_docs = list(get_versions( - self.project_name, - version_ids=version_ids, - fields=["name", "parent"] - )) - - versions_by_subset_id = collections.defaultdict(list) - for version_doc in version_docs: - subset_id = version_doc["parent"] - versions_by_subset_id[subset_id].append(version_doc) - - subset_docs = [] - if versions_by_subset_id: - subset_docs = list(get_subsets( - self.project_name, - subset_ids=versions_by_subset_id.keys(), - fields=["_id", "name", "parent"] - )) - - asset_docs = [] - subsets_by_asset_id = collections.defaultdict(list) - if subset_docs: - for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - subsets_by_asset_id[asset_id].append(subset_doc) - - asset_docs = list(get_assets( - self.project_name, - asset_ids=subsets_by_asset_id.keys(), - fields=["_id", "name"] - )) - - for asset_doc in asset_docs: - asset_id = asset_doc["_id"] - for subset_doc in subsets_by_asset_id[asset_id]: - subset_id = subset_doc["_id"] - for version_doc in versions_by_subset_id[subset_id]: - self._in_view.addItem("{} {} v{:0>3}".format( - asset_doc["name"], - subset_doc["name"], - version_doc["name"], - )) - - def _fill_outputs(self, version_doc): - version_docs = list(get_output_link_versions( - self.project_name, - version_doc["_id"], - fields=["name", "parent"] - )) - versions_by_subset_id = collections.defaultdict(list) - for version_doc in version_docs: - subset_id = version_doc["parent"] - versions_by_subset_id[subset_id].append(version_doc) - - subset_docs = [] - if versions_by_subset_id: - subset_docs = list(get_subsets( - self.project_name, - subset_ids=versions_by_subset_id.keys(), - fields=["_id", "name", "parent"] - )) - - asset_docs = [] - subsets_by_asset_id = collections.defaultdict(list) - if subset_docs: - for subset_doc in subset_docs: - asset_id = subset_doc["parent"] - subsets_by_asset_id[asset_id].append(subset_doc) - - asset_docs = list(get_assets( - self.project_name, - asset_ids=subsets_by_asset_id.keys(), - fields=["_id", "name"] - )) - - for asset_doc in asset_docs: - asset_id = asset_doc["_id"] - for subset_doc in subsets_by_asset_id[asset_id]: - subset_id = subset_doc["_id"] - for version_doc in versions_by_subset_id[subset_id]: - self._out_view.addItem("{} {} v{:0>3}".format( - asset_doc["name"], - subset_doc["name"], - version_doc["name"], - )) diff --git a/client/ayon_core/tools/ayon_utils/models/thumbnails.py b/client/ayon_core/tools/ayon_utils/models/thumbnails.py deleted file mode 100644 index 86d6f3cba3..0000000000 --- a/client/ayon_core/tools/ayon_utils/models/thumbnails.py +++ /dev/null @@ -1,118 +0,0 @@ -import collections - -import ayon_api - -from ayon_core.client.thumbnails import AYONThumbnailCache - -from .cache import NestedCacheItem - - -class ThumbnailsModel: - entity_cache_lifetime = 240 # In seconds - - def __init__(self): - self._thumbnail_cache = AYONThumbnailCache() - self._paths_cache = 
collections.defaultdict(dict) - self._folders_cache = NestedCacheItem( - levels=2, lifetime=self.entity_cache_lifetime) - self._versions_cache = NestedCacheItem( - levels=2, lifetime=self.entity_cache_lifetime) - - def reset(self): - self._paths_cache = collections.defaultdict(dict) - self._folders_cache.reset() - self._versions_cache.reset() - - def get_thumbnail_path(self, project_name, thumbnail_id): - return self._get_thumbnail_path(project_name, thumbnail_id) - - def get_folder_thumbnail_ids(self, project_name, folder_ids): - project_cache = self._folders_cache[project_name] - output = {} - missing_cache = set() - for folder_id in folder_ids: - cache = project_cache[folder_id] - if cache.is_valid: - output[folder_id] = cache.get_data() - else: - missing_cache.add(folder_id) - self._query_folder_thumbnail_ids(project_name, missing_cache) - for folder_id in missing_cache: - cache = project_cache[folder_id] - output[folder_id] = cache.get_data() - return output - - def get_version_thumbnail_ids(self, project_name, version_ids): - project_cache = self._versions_cache[project_name] - output = {} - missing_cache = set() - for version_id in version_ids: - cache = project_cache[version_id] - if cache.is_valid: - output[version_id] = cache.get_data() - else: - missing_cache.add(version_id) - self._query_version_thumbnail_ids(project_name, missing_cache) - for version_id in missing_cache: - cache = project_cache[version_id] - output[version_id] = cache.get_data() - return output - - def _get_thumbnail_path(self, project_name, thumbnail_id): - if not thumbnail_id: - return None - - project_cache = self._paths_cache[project_name] - if thumbnail_id in project_cache: - return project_cache[thumbnail_id] - - filepath = self._thumbnail_cache.get_thumbnail_filepath( - project_name, thumbnail_id - ) - if filepath is not None: - project_cache[thumbnail_id] = filepath - return filepath - - # 'ayon_api' had a bug, public function - # 'get_thumbnail_by_id' did not return output of - # 'ServerAPI' method. 
- con = ayon_api.get_server_api_connection() - result = con.get_thumbnail_by_id(project_name, thumbnail_id) - if result is None: - pass - - elif result.is_valid: - filepath = self._thumbnail_cache.store_thumbnail( - project_name, - thumbnail_id, - result.content, - result.content_type - ) - project_cache[thumbnail_id] = filepath - return filepath - - def _query_folder_thumbnail_ids(self, project_name, folder_ids): - if not project_name or not folder_ids: - return - - folders = ayon_api.get_folders( - project_name, - folder_ids=folder_ids, - fields=["id", "thumbnailId"] - ) - project_cache = self._folders_cache[project_name] - for folder in folders: - project_cache[folder["id"]] = folder["thumbnailId"] - - def _query_version_thumbnail_ids(self, project_name, version_ids): - if not project_name or not version_ids: - return - - versions = ayon_api.get_versions( - project_name, - version_ids=version_ids, - fields=["id", "thumbnailId"] - ) - project_cache = self._versions_cache[project_name] - for version in versions: - project_cache[version["id"]] = version["thumbnailId"] diff --git a/client/ayon_core/tools/ayon_utils/widgets/__init__.py b/client/ayon_core/tools/ayon_utils/widgets/__init__.py deleted file mode 100644 index a62bab6751..0000000000 --- a/client/ayon_core/tools/ayon_utils/widgets/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -from .projects_widget import ( - # ProjectsWidget, - ProjectsCombobox, - ProjectsQtModel, - ProjectSortFilterProxy, - PROJECT_NAME_ROLE, - PROJECT_IS_CURRENT_ROLE, - PROJECT_IS_ACTIVE_ROLE, - PROJECT_IS_LIBRARY_ROLE, -) - -from .folders_widget import ( - FoldersWidget, - FoldersQtModel, - FOLDERS_MODEL_SENDER_NAME, - SimpleFoldersWidget, -) - -from .tasks_widget import ( - TasksWidget, - TasksQtModel, - TASKS_MODEL_SENDER_NAME, -) -from .utils import ( - get_qt_icon, - RefreshThread, -) - - -__all__ = ( - # "ProjectsWidget", - "ProjectsCombobox", - "ProjectsQtModel", - "ProjectSortFilterProxy", - "PROJECT_NAME_ROLE", - "PROJECT_IS_CURRENT_ROLE", - "PROJECT_IS_ACTIVE_ROLE", - "PROJECT_IS_LIBRARY_ROLE", - - "FoldersWidget", - "FoldersQtModel", - "FOLDERS_MODEL_SENDER_NAME", - "SimpleFoldersWidget", - - "TasksWidget", - "TasksQtModel", - "TASKS_MODEL_SENDER_NAME", - - "get_qt_icon", - "RefreshThread", -) diff --git a/client/ayon_core/tools/ayon_utils/widgets/utils.py b/client/ayon_core/tools/ayon_utils/widgets/utils.py deleted file mode 100644 index ead8f4edb2..0000000000 --- a/client/ayon_core/tools/ayon_utils/widgets/utils.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from functools import partial - -from qtpy import QtCore, QtGui - -from ayon_core.tools.utils.lib import get_qta_icon_by_name_and_color - - -class RefreshThread(QtCore.QThread): - refresh_finished = QtCore.Signal(str) - - def __init__(self, thread_id, func, *args, **kwargs): - super(RefreshThread, self).__init__() - self._id = thread_id - self._callback = partial(func, *args, **kwargs) - self._exception = None - self._result = None - self.finished.connect(self._on_finish_callback) - - @property - def id(self): - return self._id - - @property - def failed(self): - return self._exception is not None - - def run(self): - try: - self._result = self._callback() - except Exception as exc: - self._exception = exc - - def get_result(self): - return self._result - - def _on_finish_callback(self): - """Trigger custom signal with thread id. - - Listening for 'finished' signal we make sure that execution of thread - finished and QThread object can be safely deleted. 
- """ - - self.refresh_finished.emit(self.id) - - -class _IconsCache: - """Cache for icons.""" - - _cache = {} - _default = None - - @classmethod - def _get_cache_key(cls, icon_def): - parts = [] - icon_type = icon_def["type"] - if icon_type == "path": - parts = [icon_type, icon_def["path"]] - - elif icon_type == "awesome-font": - parts = [icon_type, icon_def["name"], icon_def["color"]] - return "|".join(parts) - - @classmethod - def get_icon(cls, icon_def): - if not icon_def: - return None - icon_type = icon_def["type"] - cache_key = cls._get_cache_key(icon_def) - cache = cls._cache.get(cache_key) - if cache is not None: - return cache - - icon = None - if icon_type == "path": - path = icon_def["path"] - if os.path.exists(path): - icon = QtGui.QIcon(path) - - elif icon_type == "awesome-font": - icon_name = icon_def["name"] - icon_color = icon_def["color"] - icon = get_qta_icon_by_name_and_color(icon_name, icon_color) - if icon is None: - icon = get_qta_icon_by_name_and_color( - "fa.{}".format(icon_name), icon_color) - if icon is None: - icon = cls.get_default() - cls._cache[cache_key] = icon - return icon - - @classmethod - def get_default(cls): - pix = QtGui.QPixmap(1, 1) - pix.fill(QtCore.Qt.transparent) - return QtGui.QIcon(pix) - - -def get_qt_icon(icon_def): - """Returns icon from cache or creates new one. - - Args: - icon_def (dict[str, Any]): Icon definition. - - Returns: - QtGui.QIcon: Icon. - """ - - return _IconsCache.get_icon(icon_def) diff --git a/client/ayon_core/tools/ayon_utils/models/__init__.py b/client/ayon_core/tools/common_models/__init__.py similarity index 100% rename from client/ayon_core/tools/ayon_utils/models/__init__.py rename to client/ayon_core/tools/common_models/__init__.py diff --git a/client/ayon_core/tools/ayon_utils/models/cache.py b/client/ayon_core/tools/common_models/cache.py similarity index 100% rename from client/ayon_core/tools/ayon_utils/models/cache.py rename to client/ayon_core/tools/common_models/cache.py diff --git a/client/ayon_core/tools/ayon_utils/models/hierarchy.py b/client/ayon_core/tools/common_models/hierarchy.py similarity index 96% rename from client/ayon_core/tools/ayon_utils/models/hierarchy.py rename to client/ayon_core/tools/common_models/hierarchy.py index 10495cf10b..d8b28f020d 100644 --- a/client/ayon_core/tools/ayon_utils/models/hierarchy.py +++ b/client/ayon_core/tools/common_models/hierarchy.py @@ -380,6 +380,26 @@ class HierarchyModel(object): ) return items.get(folder_path) + def get_task_item_by_name( + self, project_name, folder_id, task_name, sender + ): + """Get task item by name and folder id. + + Args: + project_name (str): Project name. + folder_id (str): Folder id. + task_name (str): Task name. + sender (Union[str, None]): Who requested the task item. + + Returns: + Union[TaskItem, None]: Task item found by name and folder id. 
+ + """ + for task_item in self.get_task_items(project_name, folder_id, sender): + if task_item.name == task_name: + return task_item + return None + def get_task_items(self, project_name, folder_id, sender): if not project_name or not folder_id: return [] diff --git a/client/ayon_core/tools/ayon_utils/models/projects.py b/client/ayon_core/tools/common_models/projects.py similarity index 100% rename from client/ayon_core/tools/ayon_utils/models/projects.py rename to client/ayon_core/tools/common_models/projects.py diff --git a/client/ayon_core/tools/ayon_utils/models/selection.py b/client/ayon_core/tools/common_models/selection.py similarity index 100% rename from client/ayon_core/tools/ayon_utils/models/selection.py rename to client/ayon_core/tools/common_models/selection.py diff --git a/client/ayon_core/client/thumbnails.py b/client/ayon_core/tools/common_models/thumbnails.py similarity index 62% rename from client/ayon_core/client/thumbnails.py rename to client/ayon_core/tools/common_models/thumbnails.py index dc649b9651..1c3aadc49f 100644 --- a/client/ayon_core/client/thumbnails.py +++ b/client/ayon_core/tools/common_models/thumbnails.py @@ -1,24 +1,19 @@ -"""Cache of thumbnails downloaded from AYON server. - -Thumbnails are cached to appdirs to predefined directory. - -This should be moved to thumbnails logic in pipeline but because it would -overflow OpenPype logic it's here for now. -""" - import os import time import collections +import ayon_api import appdirs +from .cache import NestedCacheItem + FileInfo = collections.namedtuple( "FileInfo", ("path", "size", "modification_time") ) -class AYONThumbnailCache: +class ThumbnailsCache: """Cache of thumbnails on local storage. Thumbnails are cached to appdirs to predefined directory. Each project has @@ -117,7 +112,7 @@ class AYONThumbnailCache: """ thumbnails_dir = self.get_thumbnails_dir() - # Skip if thumbnails dir does not exists yet + # Skip if thumbnails dir does not exist yet if not os.path.exists(thumbnails_dir): return @@ -227,3 +222,114 @@ class AYONThumbnailCache: os.utime(thumbnail_path, (current_time, current_time)) return thumbnail_path + + +class ThumbnailsModel: + entity_cache_lifetime = 240 # In seconds + + def __init__(self): + self._thumbnail_cache = ThumbnailsCache() + self._paths_cache = collections.defaultdict(dict) + self._folders_cache = NestedCacheItem( + levels=2, lifetime=self.entity_cache_lifetime) + self._versions_cache = NestedCacheItem( + levels=2, lifetime=self.entity_cache_lifetime) + + def reset(self): + self._paths_cache = collections.defaultdict(dict) + self._folders_cache.reset() + self._versions_cache.reset() + + def get_thumbnail_path(self, project_name, thumbnail_id): + return self._get_thumbnail_path(project_name, thumbnail_id) + + def get_folder_thumbnail_ids(self, project_name, folder_ids): + project_cache = self._folders_cache[project_name] + output = {} + missing_cache = set() + for folder_id in folder_ids: + cache = project_cache[folder_id] + if cache.is_valid: + output[folder_id] = cache.get_data() + else: + missing_cache.add(folder_id) + self._query_folder_thumbnail_ids(project_name, missing_cache) + for folder_id in missing_cache: + cache = project_cache[folder_id] + output[folder_id] = cache.get_data() + return output + + def get_version_thumbnail_ids(self, project_name, version_ids): + project_cache = self._versions_cache[project_name] + output = {} + missing_cache = set() + for version_id in version_ids: + cache = project_cache[version_id] + if cache.is_valid: + output[version_id] = 
cache.get_data() + else: + missing_cache.add(version_id) + self._query_version_thumbnail_ids(project_name, missing_cache) + for version_id in missing_cache: + cache = project_cache[version_id] + output[version_id] = cache.get_data() + return output + + def _get_thumbnail_path(self, project_name, thumbnail_id): + if not thumbnail_id: + return None + + project_cache = self._paths_cache[project_name] + if thumbnail_id in project_cache: + return project_cache[thumbnail_id] + + filepath = self._thumbnail_cache.get_thumbnail_filepath( + project_name, thumbnail_id + ) + if filepath is not None: + project_cache[thumbnail_id] = filepath + return filepath + + # 'ayon_api' had a bug, public function + # 'get_thumbnail_by_id' did not return output of + # 'ServerAPI' method. + con = ayon_api.get_server_api_connection() + result = con.get_thumbnail_by_id(project_name, thumbnail_id) + if result is None: + pass + + elif result.is_valid: + filepath = self._thumbnail_cache.store_thumbnail( + project_name, + thumbnail_id, + result.content, + result.content_type + ) + project_cache[thumbnail_id] = filepath + return filepath + + def _query_folder_thumbnail_ids(self, project_name, folder_ids): + if not project_name or not folder_ids: + return + + folders = ayon_api.get_folders( + project_name, + folder_ids=folder_ids, + fields=["id", "thumbnailId"] + ) + project_cache = self._folders_cache[project_name] + for folder in folders: + project_cache[folder["id"]] = folder["thumbnailId"] + + def _query_version_thumbnail_ids(self, project_name, version_ids): + if not project_name or not version_ids: + return + + versions = ayon_api.get_versions( + project_name, + version_ids=version_ids, + fields=["id", "thumbnailId"] + ) + project_cache = self._versions_cache[project_name] + for version in versions: + project_cache[version["id"]] = version["thumbnailId"] diff --git a/client/ayon_core/tools/context_dialog/window.py b/client/ayon_core/tools/context_dialog/window.py index e2c9f71aaa..828d771142 100644 --- a/client/ayon_core/tools/context_dialog/window.py +++ b/client/ayon_core/tools/context_dialog/window.py @@ -6,19 +6,17 @@ from qtpy import QtWidgets, QtCore, QtGui from ayon_core import style from ayon_core.lib.events import QueuedEventSystem -from ayon_core.tools.ayon_utils.models import ( +from ayon_core.tools.common_models import ( ProjectsModel, HierarchyModel, ) -from ayon_core.tools.ayon_utils.widgets import ( +from ayon_core.tools.utils import ( ProjectsCombobox, FoldersWidget, TasksWidget, -) -from ayon_core.tools.utils.lib import ( - center_window, get_ayon_qt_app, ) +from ayon_core.tools.utils.lib import center_window class SelectionModel(object): @@ -277,8 +275,8 @@ class ContextDialogController: def is_initial_context_valid(self): return self._initial_folder_found and self._initial_project_found - def set_initial_context(self, project_name=None, asset_name=None): - result = self._prepare_initial_context(project_name, asset_name) + def set_initial_context(self, project_name=None, folder_path=None): + result = self._prepare_initial_context(project_name, folder_path) self._initial_project_name = project_name self._initial_folder_id = result["folder_id"] @@ -352,7 +350,7 @@ class ContextDialogController: with open(self._output_path, "w") as stream: json.dump(self.get_selected_context(), stream, indent=4) - def _prepare_initial_context(self, project_name, asset_name): + def _prepare_initial_context(self, project_name, folder_path): project_found = True output = { "project_found": project_found, @@ -362,26 +360,26 
@@ class ContextDialogController: "tasks_found": True, } if project_name is None: - asset_name = None + folder_path = None else: project = ayon_api.get_project(project_name) project_found = project is not None output["project_found"] = project_found - if not project_found or not asset_name: + if not project_found or not folder_path: return output - output["folder_label"] = asset_name + output["folder_label"] = folder_path folder_id = None folder_found = False # First try to find by path - folder = ayon_api.get_folder_by_path(project_name, asset_name) + folder = ayon_api.get_folder_by_path(project_name, folder_path) # Try to find by name if folder was not found by path - # - prevent to query by name if 'asset_name' contains '/' - if not folder and "/" not in asset_name: + # - prevent to query by name if 'folder_path' contains '/' + if not folder and "/" not in folder_path: folder = next( ayon_api.get_folders( - project_name, folder_names=[asset_name], fields=["id"]), + project_name, folder_names=[folder_path], fields=["id"]), None ) @@ -496,10 +494,10 @@ class ContextDialog(QtWidgets.QDialog): Context has 3 parts: - Project - - Asset + - Folder - Task - It is possible to predefine project and asset. In that case their widgets + It is possible to predefine project and folder. In that case their widgets will have passed preselected values and will be disabled. """ def __init__(self, controller=None, parent=None): @@ -521,7 +519,7 @@ class ContextDialog(QtWidgets.QDialog): # UI initialization main_splitter = QtWidgets.QSplitter(self) - # Left side widget contains project combobox and asset widget + # Left side widget contains project combobox and folders widget left_side_widget = QtWidgets.QWidget(main_splitter) project_combobox = ProjectsCombobox( @@ -531,7 +529,7 @@ class ContextDialog(QtWidgets.QDialog): ) project_combobox.set_select_item_visible(True) - # Assets widget + # Folders widget folders_widget = FoldersWidget( controller, parent=left_side_widget, @@ -661,11 +659,7 @@ class ContextDialog(QtWidgets.QDialog): self._controller.set_strict(enabled) def refresh(self): - """Refresh all widget one by one. - - When asset refresh is triggered we have to wait when is done so - this method continues with `_on_asset_widget_refresh_finished`. 
- """ + """Refresh all widget one by one.""" self._controller.reset() @@ -673,10 +667,10 @@ class ContextDialog(QtWidgets.QDialog): """Result of dialog.""" return self._controller.get_selected_context() - def set_context(self, project_name=None, asset_name=None): + def set_context(self, project_name=None, folder_path=None): """Set context which will be used and locked in dialog.""" - self._controller.set_initial_context(project_name, asset_name) + self._controller.set_initial_context(project_name, folder_path) def _on_projects_refresh(self): initial_context = self._controller.get_initial_context() @@ -784,14 +778,14 @@ class ContextDialog(QtWidgets.QDialog): def main( path_to_store, project_name=None, - asset_name=None, + folder_path=None, strict=True ): # Run Qt application app = get_ayon_qt_app() controller = ContextDialogController() controller.set_strict(strict) - controller.set_initial_context(project_name, asset_name) + controller.set_initial_context(project_name, folder_path) controller.set_output_json_path(path_to_store) window = ContextDialog(controller=controller) window.show() diff --git a/client/ayon_core/tools/creator/window.py b/client/ayon_core/tools/creator/window.py index 7bf65ea510..5bdc6da9b6 100644 --- a/client/ayon_core/tools/creator/window.py +++ b/client/ayon_core/tools/creator/window.py @@ -2,15 +2,15 @@ import sys import traceback import re +import ayon_api from qtpy import QtWidgets, QtCore -from ayon_core.client import get_asset_by_name, get_subsets from ayon_core import style from ayon_core.settings import get_current_project_settings from ayon_core.tools.utils.lib import qt_app_context from ayon_core.pipeline import ( get_current_project_name, - get_current_asset_name, + get_current_folder_path, get_current_task_name, ) from ayon_core.pipeline.create import ( @@ -216,20 +216,20 @@ class CreatorWindow(QtWidgets.QDialog): # Early exit if no folder path if not folder_path: self._build_menu() - self.echo("Asset name is required ..") + self.echo("Folder is required ..") self._set_valid_state(False) return project_name = get_current_project_name() - asset_doc = None + folder_entity = None if creator_plugin: - # Get the asset from the database which match with the name - asset_doc = get_asset_by_name( - project_name, folder_path, fields=["_id"] + # Get the folder from the database which match with the name + folder_entity = ayon_api.get_folder_by_path( + project_name, folder_path, fields={"id"} ) # Get plugin - if not asset_doc or not creator_plugin: + if not folder_entity or not creator_plugin: self._build_menu() if not creator_plugin: @@ -239,12 +239,16 @@ class CreatorWindow(QtWidgets.QDialog): self._set_valid_state(False) return - folder_id = asset_doc["_id"] + folder_id = folder_entity["id"] + task_name = get_current_task_name() + task_entity = ayon_api.get_task_by_name( + project_name, folder_id, task_name + ) # Calculate product name with Creator plugin product_name = creator_plugin.get_product_name( - project_name, folder_id, task_name, user_input_text + project_name, folder_entity, task_entity, user_input_text ) # Force replacement of prohibited symbols # QUESTION should Creator care about this and here should be only @@ -272,12 +276,12 @@ class CreatorWindow(QtWidgets.QDialog): self._product_name_input.setText(product_name) # Get all products of the current folder - subset_docs = get_subsets( - project_name, asset_ids=[folder_id], fields=["name"] + product_entities = ayon_api.get_products( + project_name, folder_ids={folder_id}, fields={"name"} ) 
existing_product_names = { - subset_doc["name"] - for subset_doc in subset_docs + product_entity["name"] + for product_entity in product_entities } existing_product_names_low = set( _name.lower() @@ -372,13 +376,13 @@ class CreatorWindow(QtWidgets.QDialog): self.setStyleSheet(style.load_stylesheet()) def refresh(self): - self._folder_path_input.setText(get_current_asset_name()) + self._folder_path_input.setText(get_current_folder_path()) self._creators_model.reset() product_types_smart_select = ( get_current_project_settings() - ["global"] + ["core"] ["tools"] ["creator"] ["product_types_smart_select"] diff --git a/client/ayon_core/tools/launcher/control.py b/client/ayon_core/tools/launcher/control.py index 8780b211f1..abd0cd78d8 100644 --- a/client/ayon_core/tools/launcher/control.py +++ b/client/ayon_core/tools/launcher/control.py @@ -1,7 +1,7 @@ from ayon_core.lib import Logger from ayon_core.lib.events import QueuedEventSystem from ayon_core.settings import get_project_settings -from ayon_core.tools.ayon_utils.models import ProjectsModel, HierarchyModel +from ayon_core.tools.common_models import ProjectsModel, HierarchyModel from .abstract import AbstractLauncherFrontEnd, AbstractLauncherBackend from .models import LauncherSelectionModel, ActionsModel diff --git a/client/ayon_core/tools/launcher/models/actions.py b/client/ayon_core/tools/launcher/models/actions.py index 6b9a33e57a..32df600c87 100644 --- a/client/ayon_core/tools/launcher/models/actions.py +++ b/client/ayon_core/tools/launcher/models/actions.py @@ -2,10 +2,13 @@ import os from ayon_core import resources from ayon_core.lib import Logger, AYONSettingsRegistry +from ayon_core.addon import AddonsManager from ayon_core.pipeline.actions import ( discover_launcher_actions, LauncherAction, + LauncherActionSelection, ) +from ayon_core.pipeline.workfile import should_use_last_workfile_on_launch # class Action: @@ -68,11 +71,6 @@ class ApplicationAction(LauncherAction): project_entities = {} _log = None - required_session_keys = ( - "AYON_PROJECT_NAME", - "AYON_FOLDER_PATH", - "AYON_TASK_NAME" - ) @property def log(self): @@ -80,18 +78,16 @@ class ApplicationAction(LauncherAction): self._log = Logger.get_logger(self.__class__.__name__) return self._log - def is_compatible(self, session): - for key in self.required_session_keys: - if not session.get(key): - return False + def is_compatible(self, selection): + if not selection.is_task_selected: + return False - project_name = session["AYON_PROJECT_NAME"] - project_entity = self.project_entities[project_name] + project_entity = self.project_entities[selection.project_name] apps = project_entity["attrib"].get("applications") if not apps or self.application.full_name not in apps: return False - project_settings = self.project_settings[project_name] + project_settings = self.project_settings[selection.project_name] only_available = project_settings["applications"]["only_available"] if only_available and not self.application.find_executable(): return False @@ -111,26 +107,23 @@ class ApplicationAction(LauncherAction): dialog.setDetailedText(details) dialog.exec_() - def process(self, session, **kwargs): + def process(self, selection, **kwargs): """Process the full Application action""" - from ayon_core.lib import ( - ApplictionExecutableNotFound, + from ayon_applications import ( + ApplicationExecutableNotFound, ApplicationLaunchFailed, ) - project_name = session["AYON_PROJECT_NAME"] - folder_path = session["AYON_FOLDER_PATH"] - task_name = session["AYON_TASK_NAME"] try: 
self.application.launch( - project_name=project_name, - folder_path=folder_path, - task_name=task_name, + project_name=selection.project_name, + folder_path=selection.folder_path, + task_name=selection.task_name, **self.data ) - except ApplictionExecutableNotFound as exc: + except ApplicationExecutableNotFound as exc: details = exc.details msg = exc.msg log_msg = str(msg) @@ -278,6 +271,8 @@ class ActionsModel: self._launcher_tool_reg = AYONSettingsRegistry("launcher_tool") + self._addons_manager = None + @property def log(self): if self._log is None: @@ -301,19 +296,19 @@ class ActionsModel: host_name, not_open_workfile_actions ): - from ayon_core.lib.applications import should_start_last_workfile - if identifier in not_open_workfile_actions: return not not_open_workfile_actions[identifier] task_name = None task_type = None if task_id is not None: - task = self._controller.get_task_entity(project_name, task_id) - task_name = task["name"] - task_type = task["taskType"] + task_entity = self._controller.get_task_entity( + project_name, task_id + ) + task_name = task_entity["name"] + task_type = task_entity["taskType"] - output = should_start_last_workfile( + output = should_use_last_workfile_on_launch( project_name, host_name, task_name, @@ -334,11 +329,11 @@ class ActionsModel: """ not_open_workfile_actions = self._get_no_last_workfile_for_context( project_name, folder_id, task_id) - session = self._prepare_session(project_name, folder_id, task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) output = [] action_items = self._get_action_items(project_name) for identifier, action in self._get_action_objects().items(): - if not action.is_compatible(session): + if not action.is_compatible(selection): continue action_item = action_items[identifier] @@ -373,7 +368,7 @@ class ActionsModel: ) def trigger_action(self, project_name, folder_id, task_id, identifier): - session = self._prepare_session(project_name, folder_id, task_id) + selection = self._prepare_selection(project_name, folder_id, task_id) failed = False error_message = None action_label = identifier @@ -402,7 +397,7 @@ class ActionsModel: ) action.data["start_last_workfile"] = start_last_workfile - action.process(session) + action.process(selection) except Exception as exc: self.log.warning("Action trigger failed.", exc_info=True) failed = True @@ -418,6 +413,11 @@ class ActionsModel: } ) + def _get_addons_manager(self): + if self._addons_manager is None: + self._addons_manager = AddonsManager() + return self._addons_manager + def _get_no_last_workfile_reg_data(self): try: no_workfile_reg_data = self._launcher_tool_reg.get_item( @@ -439,29 +439,8 @@ class ActionsModel: .get(task_id, {}) ) - def _prepare_session(self, project_name, folder_id, task_id): - folder_path = None - if folder_id: - folder = self._controller.get_folder_entity( - project_name, folder_id) - if folder: - folder_path = folder["path"] - - task_name = None - if task_id: - task = self._controller.get_task_entity(project_name, task_id) - if task: - task_name = task["name"] - - return { - "AYON_PROJECT_NAME": project_name, - "AYON_FOLDER_PATH": folder_path, - "AYON_TASK_NAME": task_name, - # Deprecated - kept for backwards compatibility - "AVALON_PROJECT": project_name, - "AVALON_ASSET": folder_path, - "AVALON_TASK": task_name, - } + def _prepare_selection(self, project_name, folder_id, task_id): + return LauncherActionSelection(project_name, folder_id, task_id) def _get_discovered_action_classes(self): if self._discovered_actions is None: @@ 
-518,19 +497,16 @@ class ActionsModel: return action_items def _get_applications_action_classes(self): - from ayon_core.lib.applications import ( - CUSTOM_LAUNCH_APP_GROUPS, - ApplicationManager, - ) - actions = [] - manager = ApplicationManager() + addons_manager = self._get_addons_manager() + applications_addon = addons_manager.get_enabled_addon("applications") + if applications_addon is None: + return actions + + manager = applications_addon.get_applications_manager() for full_name, application in manager.applications.items(): - if ( - application.group.name in CUSTOM_LAUNCH_APP_GROUPS - or not application.enabled - ): + if not application.enabled: continue action = type( diff --git a/client/ayon_core/tools/launcher/ui/actions_widget.py b/client/ayon_core/tools/launcher/ui/actions_widget.py index 617f3b0c91..a225827418 100644 --- a/client/ayon_core/tools/launcher/ui/actions_widget.py +++ b/client/ayon_core/tools/launcher/ui/actions_widget.py @@ -4,7 +4,7 @@ import collections from qtpy import QtWidgets, QtCore, QtGui from ayon_core.tools.flickcharm import FlickCharm -from ayon_core.tools.ayon_utils.widgets import get_qt_icon +from ayon_core.tools.utils import get_qt_icon from .resources import get_options_image_path diff --git a/client/ayon_core/tools/launcher/ui/hierarchy_page.py b/client/ayon_core/tools/launcher/ui/hierarchy_page.py index 5b5f88a802..226a57930b 100644 --- a/client/ayon_core/tools/launcher/ui/hierarchy_page.py +++ b/client/ayon_core/tools/launcher/ui/hierarchy_page.py @@ -6,7 +6,7 @@ from ayon_core.tools.utils import ( SquareButton, RefreshButton, ) -from ayon_core.tools.ayon_utils.widgets import ( +from ayon_core.tools.utils import ( ProjectsCombobox, FoldersWidget, TasksWidget, diff --git a/client/ayon_core/tools/launcher/ui/projects_widget.py b/client/ayon_core/tools/launcher/ui/projects_widget.py index 729caf3232..e2af54b55d 100644 --- a/client/ayon_core/tools/launcher/ui/projects_widget.py +++ b/client/ayon_core/tools/launcher/ui/projects_widget.py @@ -1,12 +1,13 @@ from qtpy import QtWidgets, QtCore from ayon_core.tools.flickcharm import FlickCharm -from ayon_core.tools.utils import PlaceholderLineEdit, RefreshButton -from ayon_core.tools.ayon_utils.widgets import ( +from ayon_core.tools.utils import ( + PlaceholderLineEdit, + RefreshButton, ProjectsQtModel, ProjectSortFilterProxy, ) -from ayon_core.tools.ayon_utils.models import PROJECTS_MODEL_SENDER +from ayon_core.tools.common_models import PROJECTS_MODEL_SENDER class ProjectIconView(QtWidgets.QListView): diff --git a/client/ayon_core/tools/loader/abstract.py b/client/ayon_core/tools/loader/abstract.py index 33add0213b..7a7d335092 100644 --- a/client/ayon_core/tools/loader/abstract.py +++ b/client/ayon_core/tools/loader/abstract.py @@ -871,7 +871,7 @@ class FrontendLoaderController(_BaseLoaderController): # Site sync functions @abstractmethod - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Is site sync enabled. Site sync addon can be enabled but can be disabled per project. 
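
The loader frontend settles on the "sitesync" spelling for these methods. A minimal usage sketch (illustrative only; `collect_sync_status` is a hypothetical helper, and `controller` is assumed to be any implementation of `FrontendLoaderController` exposing the methods declared in this patch):

    def collect_sync_status(controller, project_name, representation_ids):
        # Site-sync data is only meaningful when the addon is enabled
        # for the project; skip the extra queries otherwise.
        if not controller.is_sitesync_enabled(project_name):
            return {}
        # Per-representation availability on the active and remote sites.
        return controller.get_representations_sync_status(
            project_name, representation_ids
        )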
diff --git a/client/ayon_core/tools/loader/control.py b/client/ayon_core/tools/loader/control.py index d2ee1d890c..0c9bb369c7 100644 --- a/client/ayon_core/tools/loader/control.py +++ b/client/ayon_core/tools/loader/control.py @@ -6,7 +6,7 @@ import ayon_api from ayon_core.lib.events import QueuedEventSystem from ayon_core.pipeline import Anatomy, get_current_context from ayon_core.host import ILoadHost -from ayon_core.tools.ayon_utils.models import ( +from ayon_core.tools.common_models import ( ProjectsModel, HierarchyModel, NestedCacheItem, @@ -113,7 +113,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._products_model = ProductsModel(self) self._loader_actions_model = LoaderActionsModel(self) self._thumbnails_model = ThumbnailsModel() - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) @property def log(self): @@ -149,7 +149,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loader_actions_model.reset() self._projects_model.reset() self._thumbnails_model.reset() - self._site_sync_model.reset() + self._sitesync_model.reset() self._projects_model.refresh() @@ -240,7 +240,7 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): project_name, representation_ids) ) - action_items.extend(self._site_sync_model.get_site_sync_action_items( + action_items.extend(self._sitesync_model.get_sitesync_action_items( project_name, representation_ids) ) @@ -254,8 +254,8 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): version_ids, representation_ids ): - if self._site_sync_model.is_site_sync_action(identifier): - self._site_sync_model.trigger_action_item( + if self._sitesync_model.is_sitesync_action(identifier): + self._sitesync_model.trigger_action_item( identifier, project_name, representation_ids @@ -320,10 +320,10 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): context = get_current_context() folder_id = None project_name = context.get("project_name") - asset_name = context.get("folder_path") - if project_name and asset_name: + folder_path = context.get("folder_path") + if project_name and folder_path: folder = ayon_api.get_folder_by_path( - project_name, asset_name, fields=["id"] + project_name, folder_path, fields=["id"] ) if folder: folder_id = folder["id"] @@ -368,24 +368,24 @@ class LoaderController(BackendLoaderController, FrontendLoaderController): self._loaded_products_cache.update_data(product_ids) return self._loaded_products_cache.get_data() - def is_site_sync_enabled(self, project_name=None): - return self._site_sync_model.is_site_sync_enabled(project_name) + def is_sitesync_enabled(self, project_name=None): + return self._sitesync_model.is_sitesync_enabled(project_name) def get_active_site_icon_def(self, project_name): - return self._site_sync_model.get_active_site_icon_def(project_name) + return self._sitesync_model.get_active_site_icon_def(project_name) def get_remote_site_icon_def(self, project_name): - return self._site_sync_model.get_remote_site_icon_def(project_name) + return self._sitesync_model.get_remote_site_icon_def(project_name) def get_version_sync_availability(self, project_name, version_ids): - return self._site_sync_model.get_version_sync_availability( + return self._sitesync_model.get_version_sync_availability( project_name, version_ids ) def get_representations_sync_status( self, project_name, representation_ids ): - return self._site_sync_model.get_representations_sync_status( + 
return self._sitesync_model.get_representations_sync_status( project_name, representation_ids ) diff --git a/client/ayon_core/tools/loader/models/__init__.py b/client/ayon_core/tools/loader/models/__init__.py index 8e640659a0..10fd3da4d3 100644 --- a/client/ayon_core/tools/loader/models/__init__.py +++ b/client/ayon_core/tools/loader/models/__init__.py @@ -1,7 +1,7 @@ from .selection import SelectionModel from .products import ProductsModel from .actions import LoaderActionsModel -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/loader/models/actions.py b/client/ayon_core/tools/loader/models/actions.py index dff15ea16c..ad2993af50 100644 --- a/client/ayon_core/tools/loader/models/actions.py +++ b/client/ayon_core/tools/loader/models/actions.py @@ -1,29 +1,23 @@ import sys import traceback import inspect -import copy import collections import uuid -from ayon_core.client import ( - get_project, - get_assets, - get_subsets, - get_versions, - get_representations, -) +import ayon_api + from ayon_core.pipeline.load import ( discover_loader_plugins, - SubsetLoaderPlugin, + ProductLoaderPlugin, filter_repre_contexts_by_loader, get_loader_identifier, load_with_repre_context, - load_with_subset_context, - load_with_subset_contexts, + load_with_product_context, + load_with_product_contexts, LoadError, IncompatibleLoaderError, ) -from ayon_core.tools.ayon_utils.models import NestedCacheItem +from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem ACTIONS_MODEL_SENDER = "actions.model" @@ -33,14 +27,10 @@ NOT_SET = object() class LoaderActionsModel: """Model for loader actions. - This is probably only part of models that requires to use codebase from - 'ayon_core.client' because of backwards compatibility with loaders logic - which are expecting mongo documents. - TODOs: Deprecate 'qargparse' usage in loaders and implement conversion of 'ActionItem' to data (and 'from_data'). - Use controller to get entities (documents) -> possible only when + Use controller to get entities -> possible only when loaders are able to handle AYON vs. OpenPype logic. Add missing site sync logic, and if possible remove it from loaders. Implement loader actions to replace load plugins. @@ -317,7 +307,7 @@ class LoaderActionsModel: we want to show loaders for? Returns: - tuple[list[SubsetLoaderPlugin], list[LoaderPlugin]]: Discovered + tuple[list[ProductLoaderPlugin], list[LoaderPlugin]]: Discovered loader plugins. """ @@ -342,7 +332,7 @@ class LoaderActionsModel: identifier = get_loader_identifier(loader_cls) loaders_by_identifier[identifier] = loader_cls - if issubclass(loader_cls, SubsetLoaderPlugin): + if issubclass(loader_cls, ProductLoaderPlugin): product_loaders.append(loader_cls) else: repre_loaders.append(loader_cls) @@ -368,48 +358,15 @@ class LoaderActionsModel: return action_item.order, action_item.label - def _get_version_docs(self, project_name, version_ids): - """Get version documents for given version ids. - - This function also handles hero versions and copies data from - source version to it. - - Todos: - Remove this function when this is completely rewritten to - use AYON calls. 
- """ - - version_docs = list(get_versions( - project_name, version_ids=version_ids, hero=True - )) - hero_versions_by_src_id = collections.defaultdict(list) - src_hero_version = set() - for version_doc in version_docs: - if version_doc["type"] != "hero": - continue - version_id = "" - src_hero_version.add(version_id) - hero_versions_by_src_id[version_id].append(version_doc) - - src_versions = [] - if src_hero_version: - src_versions = get_versions(project_name, version_ids=version_ids) - for src_version in src_versions: - src_version_id = src_version["_id"] - for hero_version in hero_versions_by_src_id[src_version_id]: - hero_version["data"] = copy.deepcopy(src_version["data"]) - - return version_docs - def _contexts_for_versions(self, project_name, version_ids): """Get contexts for given version ids. - Prepare version contexts for 'SubsetLoaderPlugin' and representation + Prepare version contexts for 'ProductLoaderPlugin' and representation contexts for 'LoaderPlugin' for all children representations of given versions. This method is very similar to '_contexts_for_representations' but the - queries of documents are called in a different order. + queries of entities are called in a different order. Args: project_name (str): Project name. @@ -426,55 +383,59 @@ class LoaderActionsModel: if not project_name and not version_ids: return version_context_by_id, repre_context_by_id - version_docs = self._get_version_docs(project_name, version_ids) - version_docs_by_id = {} - version_docs_by_product_id = collections.defaultdict(list) - for version_doc in version_docs: - version_id = version_doc["_id"] - product_id = version_doc["parent"] - version_docs_by_id[version_id] = version_doc - version_docs_by_product_id[product_id].append(version_doc) + version_entities = ayon_api.get_versions( + project_name, version_ids=version_ids + ) + version_entities_by_id = {} + version_entities_by_product_id = collections.defaultdict(list) + for version_entity in version_entities: + version_id = version_entity["id"] + product_id = version_entity["productId"] + version_entities_by_id[version_id] = version_entity + version_entities_by_product_id[product_id].append(version_entity) - _product_ids = set(version_docs_by_product_id.keys()) - _product_docs = get_subsets(project_name, subset_ids=_product_ids) - product_docs_by_id = {p["_id"]: p for p in _product_docs} + _product_ids = set(version_entities_by_product_id.keys()) + _product_entities = ayon_api.get_products( + project_name, product_ids=_product_ids + ) + product_entities_by_id = {p["id"]: p for p in _product_entities} - _folder_ids = {p["parent"] for p in product_docs_by_id.values()} - _folder_docs = get_assets(project_name, asset_ids=_folder_ids) - folder_docs_by_id = {f["_id"]: f for f in _folder_docs} + _folder_ids = {p["folderId"] for p in product_entities_by_id.values()} + _folder_entities = ayon_api.get_folders( + project_name, folder_ids=_folder_ids + ) + folder_entities_by_id = {f["id"]: f for f in _folder_entities} - project_doc = get_project(project_name) - project_doc["code"] = project_doc["data"]["code"] + project_entity = ayon_api.get_project(project_name) - for version_doc in version_docs: - version_id = version_doc["_id"] - product_id = version_doc["parent"] - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for version_id, version_entity in version_entities_by_id.items(): + product_id = version_entity["productId"] + product_entity = product_entities_by_id[product_id] 
+ folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] version_context_by_id[version_id] = { - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, - "version": version_doc, + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, } - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=version_ids) - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = version_docs_by_id[version_id] - product_id = version_doc["parent"] - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + version_entity = version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = product_entities_by_id[product_id] + folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] - repre_context_by_id[repre_doc["_id"]] = { - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, - "version": version_doc, - "representation": repre_doc, + repre_context_by_id[repre_entity["id"]] = { + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, + "representation": repre_entity, } return version_context_by_id, repre_context_by_id @@ -482,12 +443,12 @@ class LoaderActionsModel: def _contexts_for_representations(self, project_name, repre_ids): """Get contexts for given representation ids. - Prepare version contexts for 'SubsetLoaderPlugin' and representation + Prepare version contexts for 'ProductLoaderPlugin' and representation contexts for 'LoaderPlugin' for all children representations of given versions. This method is very similar to '_contexts_for_versions' but the - queries of documents are called in a different order. + queries of entities are called in a different order. Args: project_name (str): Project name. 
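
Both `_contexts_for_versions` and `_contexts_for_representations` boil down to the same batched entity walk: representation -> version -> product -> folder, joined through the "versionId", "productId" and "folderId" references. A condensed, self-contained sketch of the representation path (illustrative only; `build_repre_contexts` is a hypothetical helper using the same `ayon_api` calls as this patch):

    import ayon_api

    def build_repre_contexts(project_name, repre_ids):
        # Query each entity type once, then join children to parents
        # via the id references stored on each entity.
        if not repre_ids:
            return {}
        repre_entities = list(ayon_api.get_representations(
            project_name, representation_ids=repre_ids
        ))
        versions_by_id = {
            v["id"]: v
            for v in ayon_api.get_versions(
                project_name,
                version_ids={r["versionId"] for r in repre_entities}
            )
        }
        products_by_id = {
            p["id"]: p
            for p in ayon_api.get_products(
                project_name,
                product_ids={v["productId"] for v in versions_by_id.values()}
            )
        }
        folders_by_id = {
            f["id"]: f
            for f in ayon_api.get_folders(
                project_name,
                folder_ids={p["folderId"] for p in products_by_id.values()}
            )
        }
        project_entity = ayon_api.get_project(project_name)

        contexts = {}
        for repre_entity in repre_entities:
            version_entity = versions_by_id[repre_entity["versionId"]]
            product_entity = products_by_id[version_entity["productId"]]
            contexts[repre_entity["id"]] = {
                "project": project_entity,
                "folder": folders_by_id[product_entity["folderId"]],
                "product": product_entity,
                "version": version_entity,
                "representation": repre_entity,
            }
        return contexts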
@@ -503,53 +464,58 @@ class LoaderActionsModel: if not project_name and not repre_ids: return product_context_by_id, repre_context_by_id - repre_docs = list(get_representations( + repre_entities = list(ayon_api.get_representations( project_name, representation_ids=repre_ids )) - version_ids = {r["parent"] for r in repre_docs} - version_docs = self._get_version_docs(project_name, version_ids) - version_docs_by_id = { - v["_id"]: v for v in version_docs + version_ids = {r["versionId"] for r in repre_entities} + version_entities = ayon_api.get_versions( + project_name, version_ids=version_ids + ) + version_entities_by_id = { + v["id"]: v for v in version_entities } - product_ids = {v["parent"] for v in version_docs_by_id.values()} - product_docs = get_subsets(project_name, subset_ids=product_ids) - product_docs_by_id = { - p["_id"]: p for p in product_docs + product_ids = {v["productId"] for v in version_entities_by_id.values()} + product_entities = ayon_api.get_products( + project_name, product_ids=product_ids + ) + product_entities_by_id = { + p["id"]: p for p in product_entities } - folder_ids = {p["parent"] for p in product_docs_by_id.values()} - folder_docs = get_assets(project_name, asset_ids=folder_ids) - folder_docs_by_id = { - f["_id"]: f for f in folder_docs + folder_ids = {p["folderId"] for p in product_entities_by_id.values()} + folder_entities = ayon_api.get_folders( + project_name, folder_ids=folder_ids + ) + folder_entities_by_id = { + f["id"]: f for f in folder_entities } - project_doc = get_project(project_name) - project_doc["code"] = project_doc["data"]["code"] + project_entity = ayon_api.get_project(project_name) - for product_id, product_doc in product_docs_by_id.items(): - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for product_id, product_entity in product_entities_by_id.items(): + folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] product_context_by_id[product_id] = { - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, + "project": project_entity, + "folder": folder_entity, + "product": product_entity, } - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = version_docs_by_id[version_id] - product_id = version_doc["parent"] - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + version_entity = version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = product_entities_by_id[product_id] + folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] - repre_context_by_id[repre_doc["_id"]] = { - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, - "version": version_doc, - "representation": repre_doc, + repre_context_by_id[repre_entity["id"]] = { + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, + "representation": repre_entity, } return product_context_by_id, repre_context_by_id @@ -594,10 +560,10 @@ class LoaderActionsModel: repre_product_ids = set() repre_folder_ids = set() for repre_context in filtered_repre_contexts: - repre_ids.add(repre_context["representation"]["_id"]) - repre_product_ids.add(repre_context["subset"]["_id"]) - repre_version_ids.add(repre_context["version"]["_id"]) - repre_folder_ids.add(repre_context["asset"]["_id"]) + 
repre_ids.add(repre_context["representation"]["id"]) + repre_product_ids.add(repre_context["product"]["id"]) + repre_version_ids.add(repre_context["version"]["id"]) + repre_folder_ids.add(repre_context["folder"]["id"]) item = self._create_loader_action_item( loader, @@ -611,13 +577,13 @@ class LoaderActionsModel: ) action_items.append(item) - # Subset Loaders. + # Product Loaders. version_ids = set(version_context_by_id.keys()) product_folder_ids = set() product_ids = set() for product_context in version_context_by_id.values(): - product_ids.add(product_context["subset"]["_id"]) - product_folder_ids.add(product_context["asset"]["_id"]) + product_ids.add(product_context["product"]["id"]) + product_folder_ids.add(product_context["folder"]["id"]) version_contexts = list(version_context_by_id.values()) for loader in product_loaders: @@ -643,44 +609,49 @@ class LoaderActionsModel: ): """Trigger version loader. - This triggers 'load' method of 'SubsetLoaderPlugin' for given version + This triggers 'load' method of 'ProductLoaderPlugin' for given version ids. Note: - Even when the plugin is 'SubsetLoaderPlugin' it actually expects + Even when the plugin is 'ProductLoaderPlugin' it actually expects versions and should be named 'VersionLoaderPlugin'. Because it is planned to refactor load system and introduce 'LoaderAction' plugins it is not relevant to change it anymore. Args: - loader (SubsetLoaderPlugin): Loader plugin to use. + loader (ProductLoaderPlugin): Loader plugin to use. options (dict): Option values for loader. project_name (str): Project name. version_ids (Iterable[str]): Version ids. """ - project_doc = get_project(project_name) - project_doc["code"] = project_doc["data"]["code"] + project_entity = ayon_api.get_project(project_name) - version_docs = self._get_version_docs(project_name, version_ids) - product_ids = {v["parent"] for v in version_docs} - product_docs = get_subsets(project_name, subset_ids=product_ids) - product_docs_by_id = {f["_id"]: f for f in product_docs} - folder_ids = {p["parent"] for p in product_docs_by_id.values()} - folder_docs = get_assets(project_name, asset_ids=folder_ids) - folder_docs_by_id = {f["_id"]: f for f in folder_docs} + version_entities = list(ayon_api.get_versions( + project_name, version_ids=version_ids + )) + product_ids = {v["productId"] for v in version_entities} + product_entities = ayon_api.get_products( + project_name, product_ids=product_ids + ) + product_entities_by_id = {p["id"]: p for p in product_entities} + folder_ids = {p["folderId"] for p in product_entities_by_id.values()} + folder_entities = ayon_api.get_folders( + project_name, folder_ids=folder_ids + ) + folder_entities_by_id = {f["id"]: f for f in folder_entities} product_contexts = [] - for version_doc in version_docs: - product_id = version_doc["parent"] - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for version_entity in version_entities: + product_id = version_entity["productId"] + product_entity = product_entities_by_id[product_id] + folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] product_contexts.append({ - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, - "version": version_doc, + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, }) return self._load_products_by_loader( @@ -698,7 +669,7 @@ class LoaderActionsModel: This triggers 'load' method of 'LoaderPlugin' 
for given representation ids. For that are prepared contexts for each representation, with - all parent documents. + all parent entities. Args: loader (LoaderPlugin): Loader plugin to use. @@ -707,34 +678,41 @@ class LoaderActionsModel: representation_ids (Iterable[str]): Representation ids. """ - project_doc = get_project(project_name) - project_doc["code"] = project_doc["data"]["code"] - repre_docs = list(get_representations( + project_entity = ayon_api.get_project(project_name) + repre_entities = list(ayon_api.get_representations( project_name, representation_ids=representation_ids )) - version_ids = {r["parent"] for r in repre_docs} - version_docs = self._get_version_docs(project_name, version_ids) - version_docs_by_id = {v["_id"]: v for v in version_docs} - product_ids = {v["parent"] for v in version_docs_by_id.values()} - product_docs = get_subsets(project_name, subset_ids=product_ids) - product_docs_by_id = {p["_id"]: p for p in product_docs} - folder_ids = {p["parent"] for p in product_docs_by_id.values()} - folder_docs = get_assets(project_name, asset_ids=folder_ids) - folder_docs_by_id = {f["_id"]: f for f in folder_docs} + version_ids = {r["versionId"] for r in repre_entities} + version_entities = ayon_api.get_versions( + project_name, version_ids=version_ids + ) + version_entities_by_id = {v["id"]: v for v in version_entities} + product_ids = { + v["productId"] for v in version_entities_by_id.values() + } + product_entities = ayon_api.get_products( + project_name, product_ids=product_ids + ) + product_entities_by_id = {p["id"]: p for p in product_entities} + folder_ids = {p["folderId"] for p in product_entities_by_id.values()} + folder_entities = ayon_api.get_folders( + project_name, folder_ids=folder_ids + ) + folder_entities_by_id = {f["id"]: f for f in folder_entities} repre_contexts = [] - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - version_doc = version_docs_by_id[version_id] - product_id = version_doc["parent"] - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] - folder_doc = folder_docs_by_id[folder_id] + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + version_entity = version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = product_entities_by_id[product_id] + folder_id = product_entity["folderId"] + folder_entity = folder_entities_by_id[folder_id] repre_contexts.append({ - "project": project_doc, - "asset": folder_doc, - "subset": product_doc, - "version": version_doc, - "representation": repre_doc, + "project": project_entity, + "folder": folder_entity, + "product": product_entity, + "version": version_entity, + "representation": repre_entity, }) return self._load_representations_by_loader( @@ -747,18 +725,17 @@ class LoaderActionsModel: Args: loader (LoaderPlugin): Loader plugin to use. repre_contexts (list[dict]): Full info about selected - representations, containing repre, version, subset, asset and - project documents. + representations, containing repre, version, product, folder + and project entities. options (dict): Data from options. 
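+
+        Example:
+            Illustrative shape of one repre context, matching what
+            '_trigger_representation_loader' prepares::
+
+                {
+                    "project": project_entity,
+                    "folder": folder_entity,
+                    "product": product_entity,
+                    "version": version_entity,
+                    "representation": repre_entity,
+                }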
""" error_info = [] for repre_context in repre_contexts: - version_doc = repre_context["version"] - if version_doc["type"] == "hero_version": - version_name = "Hero" - else: - version_name = version_doc.get("name") + version_entity = repre_context["version"] + version = version_entity["version"] + if version < 0: + version = "Hero" try: load_with_repre_context( loader, @@ -772,8 +749,8 @@ class LoaderActionsModel: "Incompatible Loader", None, repre_context["representation"]["name"], - repre_context["subset"]["name"], - version_name + repre_context["product"]["name"], + version )) except Exception as exc: @@ -788,20 +765,20 @@ class LoaderActionsModel: str(exc), formatted_traceback, repre_context["representation"]["name"], - repre_context["subset"]["name"], - version_name + repre_context["product"]["name"], + version )) return error_info def _load_products_by_loader(self, loader, version_contexts, options): - """Triggers load with SubsetLoader type of loaders. + """Triggers load with ProductLoader type of loaders. Warning: - Plugin is named 'SubsetLoader' but version is passed to context + Plugin is named 'ProductLoader' but version is passed to context too. Args: - loader (SubsetLoder): Loader used to load. + loader (ProductLoader): Loader used to load. version_contexts (list[dict[str, Any]]): For context for each version. options (dict[str, Any]): Options for loader that user could fill. @@ -811,10 +788,10 @@ class LoaderActionsModel: if loader.is_multiple_contexts_compatible: product_names = [] for context in version_contexts: - product_name = context.get("subset", {}).get("name") or "N/A" + product_name = context.get("product", {}).get("name") or "N/A" product_names.append(product_name) try: - load_with_subset_contexts( + load_with_product_contexts( loader, version_contexts, options=options @@ -837,10 +814,10 @@ class LoaderActionsModel: else: for version_context in version_contexts: product_name = ( - version_context.get("subset", {}).get("name") or "N/A" + version_context.get("product", {}).get("name") or "N/A" ) try: - load_with_subset_context( + load_with_product_context( loader, version_context, options=options diff --git a/client/ayon_core/tools/loader/models/products.py b/client/ayon_core/tools/loader/models/products.py index 63547bef8b..812446a012 100644 --- a/client/ayon_core/tools/loader/models/products.py +++ b/client/ayon_core/tools/loader/models/products.py @@ -6,7 +6,7 @@ import ayon_api from ayon_api.operations import OperationsSession from ayon_core.style import get_default_entity_icon_color -from ayon_core.tools.ayon_utils.models import NestedCacheItem +from ayon_core.tools.common_models import NestedCacheItem from ayon_core.tools.loader.abstract import ( ProductTypeItem, ProductItem, diff --git a/client/ayon_core/tools/loader/models/site_sync.py b/client/ayon_core/tools/loader/models/sitesync.py similarity index 70% rename from client/ayon_core/tools/loader/models/site_sync.py rename to client/ayon_core/tools/loader/models/sitesync.py index 2a6f1558ad..987510905b 100644 --- a/client/ayon_core/tools/loader/models/site_sync.py +++ b/client/ayon_core/tools/loader/models/sitesync.py @@ -1,10 +1,10 @@ import collections +from ayon_api import get_representations, get_versions_links + from ayon_core.lib import Logger -from ayon_core.client.entities import get_representations -from ayon_core.client import get_linked_representation_id from ayon_core.addon import AddonsManager -from ayon_core.tools.ayon_utils.models import NestedCacheItem +from ayon_core.tools.common_models 
import NestedCacheItem from ayon_core.tools.loader.abstract import ActionItem DOWNLOAD_IDENTIFIER = "sitesync.download" @@ -36,7 +36,7 @@ class SiteSyncModel: self._controller = controller self._site_icons = None - self._site_sync_enabled_cache = NestedCacheItem( + self._sitesync_enabled_cache = NestedCacheItem( levels=1, lifetime=self.lifetime ) self._active_site_cache = NestedCacheItem( @@ -57,17 +57,17 @@ class SiteSyncModel: ) manager = AddonsManager() - self._site_sync_addon = manager.get("sync_server") + self._sitesync_addon = manager.get("sitesync") def reset(self): self._site_icons = None - self._site_sync_enabled_cache.reset() + self._sitesync_enabled_cache.reset() self._active_site_cache.reset() self._remote_site_cache.reset() self._version_availability_cache.reset() self._repre_status_cache.reset() - def is_site_sync_enabled(self, project_name=None): + def is_sitesync_enabled(self, project_name=None): """Site sync is enabled for a project. Returns false if site sync addon is not available or enabled @@ -82,13 +82,13 @@ class SiteSyncModel: bool: Site sync is enabled. """ - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return False - cache = self._site_sync_enabled_cache[project_name] + cache = self._sitesync_enabled_cache[project_name] if not cache.is_valid: enabled = True if project_name: - enabled = self._site_sync_addon.is_project_enabled( + enabled = self._sitesync_addon.is_project_enabled( project_name, single=True ) cache.update_data(enabled) @@ -107,8 +107,8 @@ class SiteSyncModel: cache = self._active_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_active_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_active_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -125,8 +125,8 @@ class SiteSyncModel: cache = self._remote_site_cache[project_name] if not cache.is_valid: site_name = None - if project_name and self._is_site_sync_addon_enabled(): - site_name = self._site_sync_addon.get_remote_site(project_name) + if project_name and self._is_sitesync_addon_enabled(): + site_name = self._sitesync_addon.get_remote_site(project_name) cache.update_data(site_name) return cache.get_data() @@ -140,7 +140,7 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. """ - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None active_site = self.get_active_site(project_name) return self._get_site_icon_def(project_name, active_site) @@ -155,14 +155,14 @@ class SiteSyncModel: Union[dict[str, Any], None]: Site icon definition. 
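+
+        Example:
+            Illustrative definition only; real values come from the
+            sitesync addon icons::
+
+                {
+                    "type": "awesome-font",
+                    "name": "fa.globe",
+                    "color": "#ffffff",
+                }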
""" - if not project_name or not self.is_site_sync_enabled(project_name): + if not project_name or not self.is_sitesync_enabled(project_name): return None remote_site = self.get_remote_site(project_name) return self._get_site_icon_def(project_name, remote_site) def _get_site_icon_def(self, project_name, site_name): # use different icon for studio even if provider is 'local_drive' - if site_name == self._site_sync_addon.DEFAULT_SITE: + if site_name == self._sitesync_addon.DEFAULT_SITE: provider = "studio" else: provider = self._get_provider_for_site(project_name, site_name) @@ -179,7 +179,7 @@ class SiteSyncModel: dict[str, tuple[int, int]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { version_id: _default_version_availability() for version_id in version_ids @@ -217,7 +217,7 @@ class SiteSyncModel: dict[str, tuple[float, float]] """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return { repre_id: _default_repre_status() for repre_id in representation_ids @@ -242,7 +242,7 @@ class SiteSyncModel: output[repre_id] = repre_cache.get_data() return output - def get_site_sync_action_items(self, project_name, representation_ids): + def get_sitesync_action_items(self, project_name, representation_ids): """ Args: @@ -253,7 +253,7 @@ class SiteSyncModel: list[ActionItem]: Actions that can be shown in loader. """ - if not self.is_site_sync_enabled(project_name): + if not self.is_sitesync_enabled(project_name): return [] repres_status = self.get_representations_sync_status( @@ -289,7 +289,7 @@ class SiteSyncModel: return action_items - def is_site_sync_action(self, identifier): + def is_sitesync_action(self, identifier): """Should be `identifier` handled by SiteSync. Args: @@ -322,43 +322,53 @@ class SiteSyncModel: active_site = self.get_active_site(project_name) remote_site = self.get_remote_site(project_name) - repre_docs = list(get_representations( - project_name, representation_ids=representation_ids - )) - product_type_by_repre_id = { - item["_id"]: item["context"]["family"] - for item in repre_docs + repre_entities_by_id = { + repre_entity["id"]: repre_entity + for repre_entity in get_representations( + project_name, representation_ids=representation_ids + ) } + # TODO get product type from product entity instead of 'context' + # on representation + product_type_by_repre_id = {} + for repre_id, repre_entity in repre_entities_by_id.items(): + repre_context = repre_entity["context"] + product_type = repre_context.get("product", {}).get("type") + if not product_type: + product_type = repre_context.get("family") + + product_type_by_repre_id[repre_id] = product_type for repre_id in representation_ids: + repre_entity = repre_entities_by_id.get(repre_id) product_type = product_type_by_repre_id[repre_id] if identifier == DOWNLOAD_IDENTIFIER: self._add_site( - project_name, repre_id, active_site, product_type + project_name, repre_entity, active_site, product_type ) elif identifier == UPLOAD_IDENTIFIER: self._add_site( - project_name, repre_id, remote_site, product_type + project_name, repre_entity, remote_site, product_type ) elif identifier == REMOVE_IDENTIFIER: - self._site_sync_addon.remove_site( + self._sitesync_addon.remove_site( project_name, repre_id, active_site, remove_local_files=True ) - def _is_site_sync_addon_enabled(self): + def _is_sitesync_addon_enabled(self): """ Returns: bool: Site sync addon is enabled. 
""" - if self._site_sync_addon is None: + if self._sitesync_addon is None: return False - return self._site_sync_addon.enabled + return self._sitesync_addon.enabled def _get_provider_for_site(self, project_name, site_name): """Provider for a site. @@ -371,9 +381,9 @@ class SiteSyncModel: Union[str, None]: Provider name. """ - if not self._is_site_sync_addon_enabled(): + if not self._is_sitesync_addon_enabled(): return None - return self._site_sync_addon.get_provider_for_site( + return self._sitesync_addon.get_provider_for_site( project_name, site_name ) @@ -388,7 +398,7 @@ class SiteSyncModel: return None if self._site_icons is None: - self._site_icons = self._site_sync_addon.get_site_icons() + self._site_icons = self._sitesync_addon.get_site_icons() return self._site_icons.get(provider) def _refresh_version_availability(self, project_name, version_ids): @@ -396,7 +406,7 @@ class SiteSyncModel: return project_cache = self._version_availability_cache[project_name] - avail_by_id = self._site_sync_addon.get_version_availability( + avail_by_id = self._sitesync_addon.get_version_availability( project_name, version_ids, self.get_active_site(project_name), @@ -415,7 +425,7 @@ class SiteSyncModel: return project_cache = self._repre_status_cache[project_name] status_by_repre_id = ( - self._site_sync_addon.get_representations_sync_state( + self._sitesync_addon.get_representations_sync_state( project_name, representation_ids, self.get_active_site(project_name), @@ -485,25 +495,25 @@ class SiteSyncModel: representation_ids=representation_ids, ) - def _add_site(self, project_name, repre_id, site_name, product_type): - self._site_sync_addon.add_site( - project_name, repre_id, site_name, force=True + def _add_site(self, project_name, repre_entity, site_name, product_type): + self._sitesync_addon.add_site( + project_name, repre_entity["id"], site_name, force=True ) # TODO this should happen in site sync addon if product_type != "workfile": return - links = get_linked_representation_id( + links = self._get_linked_representation_id( project_name, - repre_id=repre_id, - link_type="reference" + repre_entity, + "reference" ) for link_repre_id in links: try: print("Adding {} to linked representation: {}".format( site_name, link_repre_id)) - self._site_sync_addon.add_site( + self._sitesync_addon.add_site( project_name, link_repre_id, site_name, @@ -512,3 +522,83 @@ class SiteSyncModel: except Exception: # do not add/reset working site for references log.debug("Site present", exc_info=True) + + def _get_linked_representation_id( + self, + project_name, + repre_entity, + link_type, + max_depth=None + ): + """Returns list of linked ids of particular type (if provided). + + One of representation document or representation id must be passed. + Note: + Representation links now works only from representation through + version back to representations. + + Todos: + Missing depth query. Not sure how it did find more representations + in depth, probably links to version? + This function should probably live in sitesync addon? + + Args: + project_name (str): Name of project where look for links. + repre_entity (dict[str, Any]): Representation entity. + link_type (str): Type of link (e.g. 'reference', ...). + max_depth (int): Limit recursion level. Default: 0 + + Returns: + List[ObjectId] Linked representation ids. 
+        """
+
+        if not repre_entity:
+            return []
+
+        version_id = repre_entity["versionId"]
+        if max_depth is None or max_depth == 0:
+            max_depth = 1
+
+        link_types = None
+        if link_type:
+            link_types = [link_type]
+
+        # Store already found version ids both to avoid recursion and to
+        # collect the output. 'version_id' itself is removed at the end.
+        linked_version_ids = {version_id}
+        # Each depth iteration resets this variable
+        versions_to_check = {version_id}
+        for _ in range(max_depth):
+            if not versions_to_check:
+                break
+
+            versions_links = get_versions_links(
+                project_name,
+                versions_to_check,
+                link_types=link_types,
+                link_direction="out")
+
+            versions_to_check = set()
+            for links in versions_links.values():
+                for link in links:
+                    # Care only about version links
+                    if link["entityType"] != "version":
+                        continue
+                    entity_id = link["entityId"]
+                    # Skip already found linked version ids
+                    if entity_id in linked_version_ids:
+                        continue
+                    linked_version_ids.add(entity_id)
+                    versions_to_check.add(entity_id)
+
+        linked_version_ids.remove(version_id)
+        if not linked_version_ids:
+            return []
+        representations = get_representations(
+            project_name,
+            version_ids=linked_version_ids,
+            fields=["id"])
+        return [
+            repre["id"]
+            for repre in representations
+        ]
diff --git a/client/ayon_core/tools/loader/ui/actions_utils.py b/client/ayon_core/tools/loader/ui/actions_utils.py
index bf6ab6eeb5..5a988ef4c2 100644
--- a/client/ayon_core/tools/loader/ui/actions_utils.py
+++ b/client/ayon_core/tools/loader/ui/actions_utils.py
@@ -10,7 +10,7 @@ from ayon_core.tools.utils.widgets import (
     OptionalAction,
     OptionDialog,
 )
-from ayon_core.tools.ayon_utils.widgets import get_qt_icon
+from ayon_core.tools.utils import get_qt_icon
 
 
 def show_actions_menu(action_items, global_point, one_item_selected, parent):
diff --git a/client/ayon_core/tools/loader/ui/folders_widget.py b/client/ayon_core/tools/loader/ui/folders_widget.py
index 34881ab49d..7b146456da 100644
--- a/client/ayon_core/tools/loader/ui/folders_widget.py
+++ b/client/ayon_core/tools/loader/ui/folders_widget.py
@@ -7,11 +7,11 @@ from ayon_core.tools.utils import (
 )
 
 from ayon_core.style import get_objected_colors
-from ayon_core.tools.ayon_utils.widgets import (
+from ayon_core.tools.utils import (
     FoldersQtModel,
     FOLDERS_MODEL_SENDER_NAME,
 )
-from ayon_core.tools.ayon_utils.widgets.folders_widget import FOLDER_ID_ROLE
+from ayon_core.tools.utils.folders_widget import FOLDER_ID_ROLE
 
 if qtpy.API == "pyside":
     from PySide.QtGui import QStyleOptionViewItemV4
diff --git a/client/ayon_core/tools/loader/ui/product_types_widget.py b/client/ayon_core/tools/loader/ui/product_types_widget.py
index 26244517ec..180994fd7f 100644
--- a/client/ayon_core/tools/loader/ui/product_types_widget.py
+++ b/client/ayon_core/tools/loader/ui/product_types_widget.py
@@ -1,6 +1,6 @@
 from qtpy import QtWidgets, QtGui, QtCore
 
-from ayon_core.tools.ayon_utils.widgets import get_qt_icon
+from ayon_core.tools.utils import get_qt_icon
 
 
 PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 1
diff --git a/client/ayon_core/tools/loader/ui/products_delegates.py b/client/ayon_core/tools/loader/ui/products_delegates.py
index 53d35c2bb7..12ed1165ae 100644
--- a/client/ayon_core/tools/loader/ui/products_delegates.py
+++ b/client/ayon_core/tools/loader/ui/products_delegates.py
@@ -50,9 +50,7 @@ class VersionComboBox(QtWidgets.QComboBox):
 
             item = self._items_by_id.get(version_id)
             if item is None:
-                label = format_version(
-                    abs(version_item.version), version_item.is_hero
-                )
+                label = 
format_version(version_item.version)
                 item = QtGui.QStandardItem(label)
                 item.setData(version_id, QtCore.Qt.UserRole)
                 self._items_by_id[version_id] = item
@@ -85,7 +83,7 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
     def displayText(self, value, locale):
         if not isinstance(value, numbers.Integral):
             return "N/A"
-        return format_version(abs(value), value < 0)
+        return format_version(value)
 
     def paint(self, painter, option, index):
         fg_color = index.data(QtCore.Qt.ForegroundRole)
diff --git a/client/ayon_core/tools/loader/ui/products_model.py b/client/ayon_core/tools/loader/ui/products_model.py
index 331efad68a..b465679c3b 100644
--- a/client/ayon_core/tools/loader/ui/products_model.py
+++ b/client/ayon_core/tools/loader/ui/products_model.py
@@ -4,7 +4,7 @@ import qtawesome
 from qtpy import QtGui, QtCore
 
 from ayon_core.style import get_default_entity_icon_color
-from ayon_core.tools.ayon_utils.widgets import get_qt_icon
+from ayon_core.tools.utils import get_qt_icon
 
 PRODUCTS_MODEL_SENDER_NAME = "qt_products_model"
 
@@ -73,7 +73,7 @@ class ProductsModel(QtGui.QStandardItemModel):
     published_time_col = column_labels.index("Time")
     folders_label_col = column_labels.index("Folder")
     in_scene_col = column_labels.index("In scene")
-    site_sync_avail_col = column_labels.index("Availability")
+    sitesync_avail_col = column_labels.index("Availability")
 
     def __init__(self, controller):
         super(ProductsModel, self).__init__()
@@ -284,7 +284,13 @@ class ProductsModel(QtGui.QStandardItemModel):
             model_item.setData(label, QtCore.Qt.DisplayRole)
         return model_item
 
-    def _set_version_data_to_product_item(self, model_item, version_item):
+    def _set_version_data_to_product_item(
+        self,
+        model_item,
+        version_item,
+        repre_count_by_version_id=None,
+        sync_availability_by_version_id=None,
+    ):
         """
 
         Args:
@@ -292,6 +298,10 @@
             from version item.
             version_item (VersionItem): Item from entities model with
                 information about version.
+            repre_count_by_version_id (Optional[dict[str, int]]): Mapping
+                of representation count by version id.
+            sync_availability_by_version_id (Optional[dict[str, tuple]]):
+                Mapping of sync availability (active, remote) by version id.
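+
+        Example:
+            Illustrative mapping shapes (ids and counts are placeholders)::
+
+                repre_count_by_version_id = {"<version id>": 2}
+                sync_availability_by_version_id = {"<version id>": (2, 1)}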
""" model_item.setData(version_item.version_id, VERSION_ID_ROLE) @@ -312,12 +322,20 @@ class ProductsModel(QtGui.QStandardItemModel): # TODO call site sync methods for all versions at once project_name = self._last_project_name version_id = version_item.version_id - repre_count = self._controller.get_versions_representation_count( - project_name, [version_id] - )[version_id] - active, remote = self._controller.get_version_sync_availability( - project_name, [version_id] - )[version_id] + if repre_count_by_version_id is None: + repre_count_by_version_id = ( + self._controller.get_versions_representation_count( + project_name, [version_id] + ) + ) + if sync_availability_by_version_id is None: + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, [version_id] + ) + ) + repre_count = repre_count_by_version_id[version_id] + active, remote = sync_availability_by_version_id[version_id] model_item.setData(repre_count, REPRESENTATIONS_COUNT_ROLE) model_item.setData(active, SYNC_ACTIVE_SITE_AVAILABILITY) @@ -327,7 +345,9 @@ class ProductsModel(QtGui.QStandardItemModel): self, product_item, active_site_icon, - remote_site_icon + remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ): model_item = self._items_by_id.get(product_item.product_id) versions = list(product_item.version_items.values()) @@ -357,7 +377,12 @@ class ProductsModel(QtGui.QStandardItemModel): model_item.setData(active_site_icon, ACTIVE_SITE_ICON_ROLE) model_item.setData(remote_site_icon, REMOTE_SITE_ICON_ROLE) - self._set_version_data_to_product_item(model_item, last_version) + self._set_version_data_to_product_item( + model_item, + last_version, + repre_count_by_version_id, + sync_availability_by_version_id, + ) return model_item def get_last_project_name(self): @@ -387,6 +412,24 @@ class ProductsModel(QtGui.QStandardItemModel): product_item.product_id: product_item for product_item in product_items } + last_version_id_by_product_id = {} + for product_item in product_items: + versions = list(product_item.version_items.values()) + versions.sort() + last_version = versions[-1] + last_version_id_by_product_id[product_item.product_id] = ( + last_version.version_id + ) + + version_ids = set(last_version_id_by_product_id.values()) + repre_count_by_version_id = self._controller.get_versions_representation_count( + project_name, version_ids + ) + sync_availability_by_version_id = ( + self._controller.get_version_sync_availability( + project_name, version_ids + ) + ) # Prepare product groups product_name_matches_by_group = collections.defaultdict(dict) @@ -443,6 +486,8 @@ class ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_items.append(item) @@ -463,6 +508,8 @@ class ProductsModel(QtGui.QStandardItemModel): product_item, active_site_icon, remote_site_icon, + repre_count_by_version_id, + sync_availability_by_version_id, ) new_merged_items.append(item) merged_product_types.add(product_item.product_type) diff --git a/client/ayon_core/tools/loader/ui/products_widget.py b/client/ayon_core/tools/loader/ui/products_widget.py index 3025ec18bd..d9f027153e 100644 --- a/client/ayon_core/tools/loader/ui/products_widget.py +++ b/client/ayon_core/tools/loader/ui/products_widget.py @@ -139,9 +139,9 @@ class ProductsWidget(QtWidgets.QWidget): products_view.setItemDelegateForColumn( products_model.in_scene_col, in_scene_delegate) - site_sync_delegate = 
SiteSyncDelegate() + sitesync_delegate = SiteSyncDelegate() products_view.setItemDelegateForColumn( - products_model.site_sync_avail_col, site_sync_delegate) + products_model.sitesync_avail_col, sitesync_delegate) main_layout = QtWidgets.QHBoxLayout(self) main_layout.setContentsMargins(0, 0, 0, 0) @@ -176,7 +176,7 @@ class ProductsWidget(QtWidgets.QWidget): self._version_delegate = version_delegate self._time_delegate = time_delegate self._in_scene_delegate = in_scene_delegate - self._site_sync_delegate = site_sync_delegate + self._sitesync_delegate = sitesync_delegate self._selected_project_name = None self._selected_folder_ids = set() @@ -192,8 +192,8 @@ class ProductsWidget(QtWidgets.QWidget): products_model.in_scene_col, not controller.is_loaded_products_supported() ) - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) def set_name_filter(self, name): @@ -229,10 +229,10 @@ class ProductsWidget(QtWidgets.QWidget): def refresh(self): self._refresh_model() - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._products_view.setColumnHidden( - self._products_model.site_sync_avail_col, - not site_sync_enabled + self._products_model.sitesync_avail_col, + not sitesync_enabled ) def _fill_version_editor(self): @@ -395,10 +395,10 @@ class ProductsWidget(QtWidgets.QWidget): def _on_folders_selection_change(self, event): project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) self._selected_project_name = project_name self._selected_folder_ids = event["folder_ids"] self._refresh_model() diff --git a/client/ayon_core/tools/loader/ui/repres_widget.py b/client/ayon_core/tools/loader/ui/repres_widget.py index 27db8dda40..d19ad306a3 100644 --- a/client/ayon_core/tools/loader/ui/repres_widget.py +++ b/client/ayon_core/tools/loader/ui/repres_widget.py @@ -4,7 +4,7 @@ from qtpy import QtWidgets, QtGui, QtCore import qtawesome from ayon_core.style import get_default_entity_icon_color -from ayon_core.tools.ayon_utils.widgets import get_qt_icon +from ayon_core.tools.utils import get_qt_icon from ayon_core.tools.utils import DeselectableTreeView from .actions_utils import show_actions_menu @@ -307,8 +307,8 @@ class RepresentationsWidget(QtWidgets.QWidget): self._repre_model = repre_model self._repre_proxy_model = repre_proxy_model - self._set_site_sync_visibility( - self._controller.is_site_sync_enabled() + self._set_sitesync_visibility( + self._controller.is_sitesync_enabled() ) self._set_multiple_folders_selected(False) @@ -320,19 +320,19 @@ class RepresentationsWidget(QtWidgets.QWidget): def _on_project_change(self, event): self._selected_project_name = event["project_name"] - site_sync_enabled = self._controller.is_site_sync_enabled( + sitesync_enabled = self._controller.is_sitesync_enabled( self._selected_project_name ) - self._set_site_sync_visibility(site_sync_enabled) + self._set_sitesync_visibility(sitesync_enabled) - def _set_site_sync_visibility(self, site_sync_enabled): + def _set_sitesync_visibility(self, sitesync_enabled): self._repre_view.setColumnHidden( self._repre_model.active_site_column, - not site_sync_enabled + not sitesync_enabled ) self._repre_view.setColumnHidden( self._repre_model.remote_site_column, - not 
site_sync_enabled + not sitesync_enabled ) def _set_multiple_folders_selected(self, selected_multiple_folders): diff --git a/client/ayon_core/tools/loader/ui/window.py b/client/ayon_core/tools/loader/ui/window.py index 104b64d81c..3a6f4679fa 100644 --- a/client/ayon_core/tools/loader/ui/window.py +++ b/client/ayon_core/tools/loader/ui/window.py @@ -10,7 +10,7 @@ from ayon_core.tools.utils import ( GoToCurrentButton, ) from ayon_core.tools.utils.lib import center_window -from ayon_core.tools.ayon_utils.widgets import ProjectsCombobox +from ayon_core.tools.utils import ProjectsCombobox from ayon_core.tools.loader.control import LoaderController from .folders_widget import LoaderFoldersWidget diff --git a/client/ayon_core/tools/publisher/control.py b/client/ayon_core/tools/publisher/control.py index 712142f662..ede772b917 100644 --- a/client/ayon_core/tools/publisher/control.py +++ b/client/ayon_core/tools/publisher/control.py @@ -14,10 +14,6 @@ import arrow import pyblish.api import ayon_api -from ayon_core.client import ( - get_asset_by_name, - get_subsets, -) from ayon_core.lib.events import QueuedEventSystem from ayon_core.lib.attribute_definitions import ( UIDef, @@ -42,7 +38,7 @@ from ayon_core.pipeline.create.context import ( ConvertorsOperationFailed, ) from ayon_core.pipeline.publish import get_publish_instance_label -from ayon_core.tools.ayon_utils.models import HierarchyModel +from ayon_core.tools.common_models import HierarchyModel # Define constant for plugin orders offset PLUGIN_ORDER_OFFSET = 0.5 @@ -66,24 +62,6 @@ class MainThreadItem: self.callback(*self.args, **self.kwargs) -class AssetDocsCache: - """Cache asset documents for creation part.""" - - def __init__(self, controller): - self._controller = controller - self._asset_docs_by_path = {} - - def reset(self): - self._asset_docs_by_path = {} - - def get_asset_doc_by_folder_path(self, folder_path): - if folder_path not in self._asset_docs_by_path: - project_name = self._controller.project_name - asset_doc = get_asset_by_name(project_name, folder_path) - self._asset_docs_by_path[folder_path] = asset_doc - return copy.deepcopy(self._asset_docs_by_path[folder_path]) - - class PublishReportMaker: """Report for single publishing process. @@ -1653,7 +1631,6 @@ class PublisherController(BasePublisherController): # Cacher of avalon documents self._hierarchy_model = HierarchyModel(self) - self._asset_docs_cache = AssetDocsCache(self) @property def project_name(self): @@ -1673,7 +1650,7 @@ class PublisherController(BasePublisherController): Union[str, None]: Folder path or None if folder is not set. 
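+
+        Example:
+            "/assets/characters/hero" (illustrative value)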
""" - return self._create_context.get_current_asset_name() + return self._create_context.get_current_folder_path() @property def current_task_name(self): @@ -1796,14 +1773,14 @@ class PublisherController(BasePublisherController): if not folder_item: return None - subset_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=[folder_item.entity_id], - fields=["name"] + folder_ids={folder_item.entity_id}, + fields={"name"} ) return { - subset_doc["name"] - for subset_doc in subset_docs + product_entity["name"] + for product_entity in product_entities } def reset(self): @@ -1816,11 +1793,10 @@ class PublisherController(BasePublisherController): self._create_context.reset_preparation() - # Reset avalon context + # Reset current context self._create_context.reset_current_context() self._hierarchy_model.reset() - self._asset_docs_cache.reset() self._reset_plugins() # Publish part must be reset after plugins @@ -2052,16 +2028,37 @@ class PublisherController(BasePublisherController): """ creator = self._creators[creator_identifier] - project_name = self.project_name - asset_doc = self._asset_docs_cache.get_asset_doc_by_folder_path( - folder_path - ) + instance = None if instance_id: instance = self.instances[instance_id] + project_name = self.project_name + folder_item = self._hierarchy_model.get_folder_item_by_path( + project_name, folder_path + ) + folder_entity = None + task_item = None + task_entity = None + if folder_item is not None: + folder_entity = self._hierarchy_model.get_folder_entity( + project_name, folder_item.entity_id + ) + task_item = self._hierarchy_model.get_task_item_by_name( + project_name, folder_item.entity_id, task_name, "controller" + ) + + if task_item is not None: + task_entity = self._hierarchy_model.get_task_entity( + project_name, task_item.task_id + ) + return creator.get_product_name( - project_name, asset_doc, task_name, variant, instance=instance + project_name, + folder_entity, + task_entity, + variant, + instance=instance ) def trigger_convertor_items(self, convertor_identifiers): diff --git a/client/ayon_core/tools/publisher/control_qt.py b/client/ayon_core/tools/publisher/control_qt.py index ee08899cac..bef3a5af3b 100644 --- a/client/ayon_core/tools/publisher/control_qt.py +++ b/client/ayon_core/tools/publisher/control_qt.py @@ -343,8 +343,9 @@ class QtRemotePublishController(BasePublisherController): @abstractmethod def _send_instance_changes_to_client(self): - instance_changes = self._get_instance_changes_for_client() - # Implement to send 'instance_changes' value to client + # TODO Implement to send 'instance_changes' value to client + # instance_changes = self._get_instance_changes_for_client() + pass @abstractmethod def save_changes(self): diff --git a/client/ayon_core/tools/publisher/widgets/create_context_widgets.py b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py index d65a2ace8d..235a778d0f 100644 --- a/client/ayon_core/tools/publisher/widgets/create_context_widgets.py +++ b/client/ayon_core/tools/publisher/widgets/create_context_widgets.py @@ -1,10 +1,10 @@ -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore from ayon_core.lib.events import QueuedEventSystem from ayon_core.tools.utils import PlaceholderLineEdit, GoToCurrentButton -from ayon_core.tools.ayon_utils.models import HierarchyExpectedSelection -from ayon_core.tools.ayon_utils.widgets import FoldersWidget, TasksWidget +from ayon_core.tools.common_models import HierarchyExpectedSelection +from 
ayon_core.tools.utils import FoldersWidget, TasksWidget
 
 
 class CreateSelectionModel(object):
diff --git a/client/ayon_core/tools/publisher/widgets/folders_dialog.py b/client/ayon_core/tools/publisher/widgets/folders_dialog.py
index 8f93264b2e..8dce7aba3a 100644
--- a/client/ayon_core/tools/publisher/widgets/folders_dialog.py
+++ b/client/ayon_core/tools/publisher/widgets/folders_dialog.py
@@ -1,8 +1,7 @@
-from qtpy import QtWidgets, QtCore, QtGui
+from qtpy import QtWidgets
 
 from ayon_core.lib.events import QueuedEventSystem
-from ayon_core.tools.ayon_utils.widgets import FoldersWidget
-from ayon_core.tools.utils import PlaceholderLineEdit
+from ayon_core.tools.utils import PlaceholderLineEdit, FoldersWidget
 
 
 class FoldersDialogController:
diff --git a/client/ayon_core/tools/publisher/widgets/publish_frame.py b/client/ayon_core/tools/publisher/widgets/publish_frame.py
index d423f97047..ee65c69c19 100644
--- a/client/ayon_core/tools/publisher/widgets/publish_frame.py
+++ b/client/ayon_core/tools/publisher/widgets/publish_frame.py
@@ -1,7 +1,3 @@
-import os
-import json
-import time
-
 from qtpy import QtWidgets, QtCore
 
 from .widgets import (
diff --git a/client/ayon_core/tools/publisher/widgets/report_page.py b/client/ayon_core/tools/publisher/widgets/report_page.py
index 1bbe8033f9..7475b39f52 100644
--- a/client/ayon_core/tools/publisher/widgets/report_page.py
+++ b/client/ayon_core/tools/publisher/widgets/report_page.py
@@ -56,6 +56,8 @@ class VerticalScrollArea(QtWidgets.QScrollArea):
         self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
         self.setLayoutDirection(QtCore.Qt.RightToLeft)
 
+        self.setObjectName("PublisherVerticalScrollArea")
+
         self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
         # Background of scrollbar will be transparent
         scrollbar_bg = self.verticalScrollBar().parent()
@@ -500,7 +502,9 @@ class ValidationErrorsView(QtWidgets.QWidget):
         errors_scroll.setWidget(errors_widget)
 
         errors_layout = QtWidgets.QVBoxLayout(errors_widget)
-        errors_layout.setContentsMargins(0, 0, 0, 0)
+        # Add 5px margin to the left so the content is not directly on the
+        # edge of the scroll widget
+        errors_layout.setContentsMargins(5, 0, 0, 0)
 
         layout = QtWidgets.QVBoxLayout(self)
         layout.addWidget(errors_scroll, 1)
diff --git a/client/ayon_core/tools/publisher/widgets/tasks_model.py b/client/ayon_core/tools/publisher/widgets/tasks_model.py
index 8f00dc37a2..78b1f23b17 100644
--- a/client/ayon_core/tools/publisher/widgets/tasks_model.py
+++ b/client/ayon_core/tools/publisher/widgets/tasks_model.py
@@ -1,6 +1,7 @@
-from qtpy import QtWidgets, QtCore, QtGui
+from qtpy import QtCore, QtGui
 
-from ayon_core.tools.utils.lib import get_default_task_icon
+from ayon_core.style import get_default_entity_icon_color
+from ayon_core.tools.utils import get_qt_icon
 
 TASK_NAME_ROLE = QtCore.Qt.UserRole + 1
 TASK_TYPE_ROLE = QtCore.Qt.UserRole + 2
@@ -121,6 +122,11 @@ class TasksModel(QtGui.QStandardItemModel):
             item = self._items_by_name.pop(task_name)
             root_item.removeRow(item.row())
 
+        icon = get_qt_icon({
+            "type": "awesome-font",
+            "name": "fa.male",
+            "color": get_default_entity_icon_color(),
+        })
         new_items = []
         for task_name in new_task_names:
             if task_name in self._items_by_name:
@@ -129,7 +135,7 @@
                 item = QtGui.QStandardItem(task_name)
                 item.setData(task_name, TASK_NAME_ROLE)
             if task_name:
-                item.setData(get_default_task_icon(), QtCore.Qt.DecorationRole)
+                item.setData(icon, QtCore.Qt.DecorationRole)
 
             self._items_by_name[task_name] = item
             new_items.append(item)
diff --git 
a/client/ayon_core/tools/publisher/widgets/widgets.py b/client/ayon_core/tools/publisher/widgets/widgets.py index 4005cf2c84..12c03c7eeb 100644 --- a/client/ayon_core/tools/publisher/widgets/widgets.py +++ b/client/ayon_core/tools/publisher/widgets/widgets.py @@ -1206,7 +1206,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): if folder_path is not None: instance["folderPath"] = folder_path - instance.set_asset_invalid(False) + instance.set_folder_invalid(False) if task_name is not None: instance["task"] = task_name or None diff --git a/client/ayon_core/tools/push_to_project/control.py b/client/ayon_core/tools/push_to_project/control.py index 3b6bd85028..58447a8389 100644 --- a/client/ayon_core/tools/push_to_project/control.py +++ b/client/ayon_core/tools/push_to_project/control.py @@ -1,16 +1,12 @@ import threading -from ayon_core.client import ( - get_asset_by_id, - get_subset_by_id, - get_version_by_id, - get_representations, -) +import ayon_api + from ayon_core.settings import get_project_settings from ayon_core.lib import prepare_template_data from ayon_core.lib.events import QueuedEventSystem from ayon_core.pipeline.create import get_product_name_template -from ayon_core.tools.ayon_utils.models import ProjectsModel, HierarchyModel +from ayon_core.tools.common_models import ProjectsModel, HierarchyModel from .models import ( PushToProjectSelectionModel, @@ -32,9 +28,10 @@ class PushToContextController: self._src_project_name = None self._src_version_id = None - self._src_asset_doc = None - self._src_subset_doc = None - self._src_version_doc = None + self._src_folder_entity = None + self._src_folder_task_entities = {} + self._src_product_entity = None + self._src_version_entity = None self._src_label = None self._submission_enabled = False @@ -71,28 +68,44 @@ class PushToContextController: self._src_project_name = project_name self._src_version_id = version_id self._src_label = None - asset_doc = None - subset_doc = None - version_doc = None + folder_entity = None + task_entities = {} + product_entity = None + version_entity = None if project_name and version_id: - version_doc = get_version_by_id(project_name, version_id) + version_entity = ayon_api.get_version_by_id( + project_name, version_id + ) - if version_doc: - subset_doc = get_subset_by_id(project_name, version_doc["parent"]) + if version_entity: + product_entity = ayon_api.get_product_by_id( + project_name, version_entity["productId"] + ) - if subset_doc: - asset_doc = get_asset_by_id(project_name, subset_doc["parent"]) + if product_entity: + folder_entity = ayon_api.get_folder_by_id( + project_name, product_entity["folderId"] + ) - self._src_asset_doc = asset_doc - self._src_subset_doc = subset_doc - self._src_version_doc = version_doc - if asset_doc: - self._user_values.set_new_folder_name(asset_doc["name"]) + if folder_entity: + task_entities = { + task_entity["name"]: task_entity + for task_entity in ayon_api.get_tasks( + project_name, folder_ids=[folder_entity["id"]] + ) + } + + self._src_folder_entity = folder_entity + self._src_folder_task_entities = task_entities + self._src_product_entity = product_entity + self._src_version_entity = version_entity + if folder_entity: + self._user_values.set_new_folder_name(folder_entity["name"]) variant = self._get_src_variant() if variant: self._user_values.set_variant(variant) - comment = version_doc["data"].get("comment") + comment = version_entity["attrib"].get("comment") if comment: self._user_values.set_comment(comment) @@ -197,39 +210,37 @@ class PushToContextController: if not 
self._src_project_name or not self._src_version_id: return "Source is not defined" - asset_doc = self._src_asset_doc - if not asset_doc: + folder_entity = self._src_folder_entity + if not folder_entity: return "Source is invalid" - folder_path_parts = list(asset_doc["data"]["parents"]) - folder_path_parts.append(asset_doc["name"]) - folder_path = "/".join(folder_path_parts) - subset_doc = self._src_subset_doc - version_doc = self._src_version_doc - return "Source: {}/{}/{}/v{:0>3}".format( + folder_path = folder_entity["path"] + product_entity = self._src_product_entity + version_entity = self._src_version_entity + return "Source: {}{}/{}/v{:0>3}".format( self._src_project_name, folder_path, - subset_doc["name"], - version_doc["name"] + product_entity["name"], + version_entity["version"] ) - def _get_task_info_from_repre_docs(self, asset_doc, repre_docs): - asset_tasks = asset_doc["data"].get("tasks") or {} + def _get_task_info_from_repre_entities( + self, task_entities, repre_entities + ): found_comb = [] - for repre_doc in repre_docs: - context = repre_doc["context"] - task_info = context.get("task") - if task_info is None: + for repre_entity in repre_entities: + context = repre_entity["context"] + repre_task_name = context.get("task") + if repre_task_name is None: continue + if isinstance(repre_task_name, dict): + repre_task_name = repre_task_name.get("name") + task_name = None task_type = None - if isinstance(task_info, str): - task_name = task_info - asset_task_info = asset_tasks.get(task_info) or {} - task_type = asset_task_info.get("type") - - elif isinstance(task_info, dict): + if repre_task_name: + task_info = task_entities.get(repre_task_name) or {} task_name = task_info.get("name") task_type = task_info.get("type") @@ -245,20 +256,17 @@ class PushToContextController: def _get_src_variant(self): project_name = self._src_project_name - version_doc = self._src_version_doc - asset_doc = self._src_asset_doc - repre_docs = get_representations( - project_name, version_ids=[version_doc["_id"]] + version_entity = self._src_version_entity + task_entities = self._src_folder_task_entities + repre_entities = ayon_api.get_representations( + project_name, version_ids={version_entity["id"]} ) - task_name, task_type = self._get_task_info_from_repre_docs( - asset_doc, repre_docs + task_name, task_type = self._get_task_info_from_repre_entities( + task_entities, repre_entities ) project_settings = get_project_settings(project_name) - subset_doc = self._src_subset_doc - product_type = subset_doc["data"].get("family") - if not product_type: - product_type = subset_doc["data"]["families"][0] + product_type = self._src_product_entity["productType"] template = get_product_name_template( self._src_project_name, product_type, @@ -292,7 +300,7 @@ class PushToContextController: print("Failed format", exc) return "" - product_name = self._src_subset_doc["name"] + product_name = self._src_product_entity["name"] if ( (product_s and not product_name.startswith(product_s)) or (product_e and not product_name.endswith(product_e)) diff --git a/client/ayon_core/tools/push_to_project/models/integrate.py b/client/ayon_core/tools/push_to_project/models/integrate.py index b427f3d226..6e43050c05 100644 --- a/client/ayon_core/tools/push_to_project/models/integrate.py +++ b/client/ayon_core/tools/push_to_project/models/integrate.py @@ -1,41 +1,25 @@ import os import re import copy -import socket import itertools -import datetime import sys import traceback import uuid -from ayon_core.client import ( - get_project, - 
get_assets,
-    get_asset_by_id,
-    get_subset_by_id,
-    get_subset_by_name,
-    get_version_by_id,
-    get_last_version_by_subset_id,
-    get_version_by_name,
-    get_representations,
-)
-from ayon_core.client.operations import (
+import ayon_api
+from ayon_api.utils import create_entity_id
+from ayon_api.operations import (
     OperationsSession,
-    new_asset_document,
-    new_subset_document,
-    new_version_doc,
-    new_representation_doc,
-    prepare_version_update_data,
-    prepare_representation_update_data,
-)
-from ayon_core.addon import AddonsManager
-from ayon_core.lib import (
-    StringTemplate,
-    get_ayon_username,
-    get_formatted_current_time,
-    source_hash,
+    new_folder_entity,
+    new_product_entity,
+    new_version_entity,
+    new_representation_entity,
 )
+from ayon_core.lib import (
+    StringTemplate,
+    source_hash,
+)
 from ayon_core.lib.file_transaction import FileTransaction
 from ayon_core.settings import get_project_settings
 from ayon_core.pipeline import Anatomy
@@ -235,20 +219,20 @@ class ProjectPushRepreItem:
         but filenames are not template based.
 
         Args:
-            repre_doc (Dict[str, Ant]): Representation document.
+            repre_entity (Dict[str, Any]): Representation entity.
             roots (Dict[str, str]): Project roots (based on project anatomy).
 
     """
 
-    def __init__(self, repre_doc, roots):
-        self._repre_doc = repre_doc
+    def __init__(self, repre_entity, roots):
+        self._repre_entity = repre_entity
         self._roots = roots
         self._src_files = None
         self._resource_files = None
         self._frame = UNKNOWN
 
     @property
-    def repre_doc(self):
-        return self._repre_doc
+    def repre_entity(self):
+        return self._repre_entity
 
     @property
     def src_files(self):
@@ -327,7 +311,7 @@ class ProjectPushRepreItem:
         if self._src_files is not None:
             return self._src_files, self._resource_files
 
-        repre_context = self._repre_doc["context"]
+        repre_context = self._repre_entity["context"]
         if "frame" in repre_context or "udim" in repre_context:
             src_files, resource_files = self._get_source_files_with_frames()
         else:
@@ -344,7 +328,7 @@
         udim_placeholder = "__udim__"
         src_files = []
         resource_files = []
-        template = self._repre_doc["data"]["template"]
+        template = self._repre_entity["attrib"]["template"]
         # Remove padding from 'udim' and 'frame' formatting keys
         # - "{frame:0>4}" -> "{frame}"
         for key in ("udim", "frame"):
@@ -352,7 +336,7 @@
             replacement = "{{{}}}".format(key)
             template = re.sub(sub_part, replacement, template)
 
-        repre_context = self._repre_doc["context"]
+        repre_context = self._repre_entity["context"]
         fill_repre_context = copy.deepcopy(repre_context)
         if "frame" in fill_repre_context:
             fill_repre_context["frame"] = frame_placeholder
@@ -373,7 +357,7 @@
             .replace(udim_placeholder, "(?P<udim>[0-9]+)")
         )
         src_basename_regex = re.compile("^{}$".format(src_basename))
-        for file_info in self._repre_doc["files"]:
+        for file_info in self._repre_entity["files"]:
             filepath_template = self._clean_path(file_info["path"])
             filepath = self._clean_path(
                 filepath_template.format(root=self._roots)
             )
@@ -405,8 +389,8 @@
     def _get_source_files(self):
         src_files = []
         resource_files = []
-        template = self._repre_doc["data"]["template"]
-        repre_context = self._repre_doc["context"]
+        template = self._repre_entity["attrib"]["template"]
+        repre_context = self._repre_entity["context"]
         fill_repre_context = copy.deepcopy(repre_context)
         fill_roots = fill_repre_context["root"]
         for root_name in tuple(fill_roots.keys()):
@@ -415,7 +399,7 @@
             fill_repre_context)
         repre_path 
= self._clean_path(repre_path)
         src_dirpath = os.path.dirname(repre_path)
-        for file_info in self._repre_doc["files"]:
+        for file_info in self._repre_entity["files"]:
             filepath_template = self._clean_path(file_info["path"])
             filepath = self._clean_path(
                 filepath_template.format(root=self._roots))
@@ -448,18 +432,17 @@ class ProjectPushItemProcess:
         self._model = model
         self._item = item
 
-        self._src_asset_doc = None
-        self._src_subset_doc = None
-        self._src_version_doc = None
+        self._src_folder_entity = None
+        self._src_product_entity = None
+        self._src_version_entity = None
         self._src_repre_items = None
 
-        self._project_doc = None
+        self._project_entity = None
         self._anatomy = None
-        self._asset_doc = None
-        self._created_asset_doc = None
+        self._folder_entity = None
         self._task_info = None
-        self._subset_doc = None
-        self._version_doc = None
+        self._product_entity = None
+        self._version_entity = None
 
         self._product_type = None
         self._product_name = None
@@ -492,12 +475,12 @@ class ProjectPushItemProcess:
         self._log_info("Source entities were found")
         self._fill_destination_project()
         self._log_info("Destination project was found")
-        self._fill_or_create_destination_asset()
-        self._log_info("Destination asset was determined")
+        self._fill_or_create_destination_folder()
+        self._log_info("Destination folder was determined")
         self._determine_product_type()
         self._determine_publish_template_name()
         self._determine_product_name()
-        self._make_sure_subset_exists()
+        self._make_sure_product_exists()
         self._make_sure_version_exists()
         self._log_info("Prerequisites were prepared")
         self._integrate_representations()
@@ -562,8 +545,8 @@
         src_project_name = self._item.src_project_name
         src_version_id = self._item.src_version_id
 
-        project_doc = get_project(src_project_name)
-        if not project_doc:
+        project_entity = ayon_api.get_project(src_project_name)
+        if not project_entity:
             self._status.set_failed(
                 f"Source project \"{src_project_name}\" was not found"
             )
@@ -576,41 +559,47 @@
 
         self._log_debug(f"Project '{src_project_name}' found")
 
-        version_doc = get_version_by_id(src_project_name, src_version_id)
-        if not version_doc:
+        version_entity = ayon_api.get_version_by_id(
+            src_project_name, src_version_id
+        )
+        if not version_entity:
             self._status.set_failed((
                 f"Source version with id \"{src_version_id}\""
                 f" was not found in project \"{src_project_name}\""
             ))
             raise PushToProjectError(self._status.fail_reason)
 
-        product_id = version_doc["parent"]
-        subset_doc = get_subset_by_id(src_project_name, product_id)
-        if not subset_doc:
+        product_id = version_entity["productId"]
+        product_entity = ayon_api.get_product_by_id(
+            src_project_name, product_id
+        )
+        if not product_entity:
             self._status.set_failed((
                 f"Could not find product with id \"{product_id}\""
                 f" in project \"{src_project_name}\""
             ))
             raise PushToProjectError(self._status.fail_reason)
 
-        asset_id = subset_doc["parent"]
-        asset_doc = get_asset_by_id(src_project_name, asset_id)
-        if not asset_doc:
+        folder_id = product_entity["folderId"]
+        folder_entity = ayon_api.get_folder_by_id(
+            src_project_name, folder_id, own_attributes=True
+        )
+        if not folder_entity:
             self._status.set_failed((
-                f"Could find asset with id \"{asset_id}\""
+                f"Could not find folder with id \"{folder_id}\""
                 f" in project \"{src_project_name}\""
             ))
             raise PushToProjectError(self._status.fail_reason)
 
         anatomy = Anatomy(src_project_name)
-        repre_docs = get_representations(
+        repre_entities = ayon_api.get_representations(
             src_project_name,
- 
version_ids=[src_version_id] + version_ids={src_version_id} ) repre_items = [ - ProjectPushRepreItem(repre_doc, anatomy.roots) - for repre_doc in repre_docs + ProjectPushRepreItem(repre_entity, anatomy.roots) + for repre_entity in repre_entities ] self._log_debug(( f"Found {len(repre_items)} representations on" @@ -623,17 +612,17 @@ class ProjectPushItemProcess: ) raise PushToProjectError(self._status.fail_reason) - self._src_asset_doc = asset_doc - self._src_subset_doc = subset_doc - self._src_version_doc = version_doc + self._src_folder_entity = folder_entity + self._src_product_entity = product_entity + self._src_version_entity = version_entity self._src_repre_items = repre_items def _fill_destination_project(self): # --- Destination entities --- dst_project_name = self._item.dst_project_name # Validate project existence - dst_project_doc = get_project(dst_project_name) - if not dst_project_doc: + dst_project_entity = ayon_api.get_project(dst_project_name) + if not dst_project_entity: self._status.set_failed( f"Destination project '{dst_project_name}' was not found" ) @@ -642,53 +631,46 @@ class ProjectPushItemProcess: self._log_debug( f"Destination project '{dst_project_name}' found" ) - self._project_doc = dst_project_doc - self._anatomy = Anatomy(dst_project_name) + self._project_entity = dst_project_entity + self._anatomy = Anatomy( + dst_project_name, + project_entity=dst_project_entity + ) self._project_settings = get_project_settings( self._item.dst_project_name ) - def _create_asset( + def _create_folder( self, - src_asset_doc, - project_doc, - parent_asset_doc, - asset_name + src_folder_entity, + project_entity, + parent_folder_entity, + folder_name ): parent_id = None - parents = [] - tools = [] - if parent_asset_doc: - parent_id = parent_asset_doc["_id"] - parents = list(parent_asset_doc["data"]["parents"]) - parents.append(parent_asset_doc["name"]) - _tools = parent_asset_doc["data"].get("tools_env") - if _tools: - tools = list(_tools) + if parent_folder_entity: + parent_id = parent_folder_entity["id"] - asset_name_low = asset_name.lower() - other_asset_docs = get_assets( - project_doc["name"], fields=["_id", "name", "data.visualParent"] + folder_name_low = folder_name.lower() + other_folder_entities = ayon_api.get_folders( + project_entity["name"], + parent_ids=[parent_id], + fields={"id", "name"} ) - for other_asset_doc in other_asset_docs: - other_name = other_asset_doc["name"] - other_parent_id = other_asset_doc["data"].get("visualParent") - if other_name.lower() != asset_name_low: + for other_folder_entity in other_folder_entities: + other_name = other_folder_entity["name"] + if other_name.lower() != folder_name_low: continue - if other_parent_id != parent_id: - self._status.set_failed(( - f"Asset with name \"{other_name}\" already" - " exists in different hierarchy." - )) - raise PushToProjectError(self._status.fail_reason) - self._log_debug(( + f"Found already existing folder with name \"{other_name}\"" + f" which matches requested name \"{folder_name}\"" )) - return get_asset_by_id(project_doc["name"], other_asset_doc["_id"]) + return ayon_api.get_folder_by_id( + project_entity["name"], other_folder_entity["id"] ) + # TODO should we hard pass attribute values?
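Reviewer note on the `_create_folder` hunk above: the sibling lookup now filters server-side instead of fetching every asset document. A minimal sketch of that pattern, assuming a configured AYON server connection (`AYON_SERVER_URL`/`AYON_API_KEY`); the project name, parent id and folder name below are hypothetical:

```python
import ayon_api

project_name = "demo_project"                     # hypothetical
parent_id = "0123456789abcdef0123456789abcdef"    # hypothetical folder id
folder_name = "NewShot"                           # hypothetical

# Only siblings under the parent are fetched, and only the needed fields.
existing_folder = None
for folder in ayon_api.get_folders(
    project_name,
    parent_ids=[parent_id],
    fields={"id", "name"},
):
    # Case-insensitive name comparison, mirroring the hunk above.
    if folder["name"].lower() == folder_name.lower():
        existing_folder = folder
        break
```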
data_keys = ( "clipIn", "clipOut", @@ -701,103 +683,113 @@ class ProjectPushItemProcess: "fps", "pixelAspect", ) - asset_data = { - "visualParent": parent_id, - "parents": parents, - "tasks": {}, - "tools_env": tools - } - src_asset_data = src_asset_doc["data"] - for key in data_keys: - if key in src_asset_data: - asset_data[key] = src_asset_data[key] + new_folder_attrib = {} + src_attrib = src_folder_entity["attrib"] + for attr_name, attr_value in src_attrib.items(): + if attr_name in data_keys: + new_folder_attrib[attr_name] = attr_value - asset_doc = new_asset_document( - asset_name, - project_doc["_id"], - parent_id, - parents, - data=asset_data + new_folder_name = ayon_api.slugify_string(folder_name) + folder_label = None + if new_folder_name != folder_name: + folder_label = folder_name + + # TODO find out how to define folder type + folder_entity = new_folder_entity( + folder_name, + "Folder", + parent_id=parent_id, + attribs=new_folder_attrib ) + if folder_label: + folder_entity["label"] = folder_label + self._operations.create_entity( - project_doc["name"], - asset_doc["type"], - asset_doc + project_entity["name"], + "folder", + folder_entity ) self._log_info( - f"Creating new asset with name \"{asset_name}\"" + f"Creating new folder with name \"{folder_name}\"" ) - self._created_asset_doc = asset_doc - return asset_doc + # Calculate path for usage in the rest of the logic + parent_path = "" + if parent_folder_entity: + parent_path = parent_folder_entity["path"] + folder_entity["path"] = "/".join([parent_path, folder_name]) + return folder_entity - def _fill_or_create_destination_asset(self): + def _fill_or_create_destination_folder(self): dst_project_name = self._item.dst_project_name dst_folder_id = self._item.dst_folder_id dst_task_name = self._item.dst_task_name + dst_task_name_low = dst_task_name.lower() new_folder_name = self._item.new_folder_name if not dst_folder_id and not new_folder_name: self._status.set_failed( - "Push item does not have defined destination asset" + "Push item does not have a defined destination folder" ) raise PushToProjectError(self._status.fail_reason) - # Get asset document - parent_asset_doc = None + # Get folder entity + parent_folder_entity = None if dst_folder_id: - parent_asset_doc = get_asset_by_id( + parent_folder_entity = ayon_api.get_folder_by_id( self._item.dst_project_name, self._item.dst_folder_id ) - if not parent_asset_doc: + if not parent_folder_entity: self._status.set_failed( - f"Could find asset with id \"{dst_folder_id}\"" + f"Could not find folder with id \"{dst_folder_id}\"" f" in project \"{dst_project_name}\"" ) raise PushToProjectError(self._status.fail_reason) if not new_folder_name: - asset_doc = parent_asset_doc + folder_entity = parent_folder_entity else: - asset_doc = self._create_asset( - self._src_asset_doc, - self._project_doc, - parent_asset_doc, + folder_entity = self._create_folder( + self._src_folder_entity, + self._project_entity, + parent_folder_entity, new_folder_name ) - self._asset_doc = asset_doc + self._folder_entity = folder_entity if not dst_task_name: self._task_info = {} return - asset_path_parts = list(asset_doc["data"]["parents"]) - asset_path_parts.append(asset_doc["name"]) - asset_path = "/".join(asset_path_parts) - asset_tasks = asset_doc.get("data", {}).get("tasks") or {} - task_info = asset_tasks.get(dst_task_name) + folder_path = folder_entity["path"] + folder_tasks = { + task_entity["name"].lower(): task_entity + for task_entity in ayon_api.get_tasks( + dst_project_name, folder_ids=[folder_entity["id"]] )
+ } + task_info = folder_tasks.get(dst_task_name_low) if not task_info: self._status.set_failed( f"Could find task with name \"{dst_task_name}\"" - f" on asset \"{asset_path}\"" + f" on folder \"{folder_path}\"" f" in project \"{dst_project_name}\"" ) raise PushToProjectError(self._status.fail_reason) - # Create copy of task info to avoid changing data in asset document + # Create copy of task info to avoid changing data in task entity task_info = copy.deepcopy(task_info) task_info["name"] = dst_task_name # Fill rest of task information based on task type - task_type = task_info["type"] - task_type_info = self._project_doc["config"]["tasks"].get( - task_type, {}) + task_type_name = task_info["type"] + task_types_by_name = { + task_type["name"]: task_type + for task_type in self._project_entity["taskTypes"] + } + task_type_info = task_types_by_name.get(task_type_name, {}) task_info.update(task_type_info) self._task_info = task_info def _determine_product_type(self): - subset_doc = self._src_subset_doc - product_type = subset_doc["data"].get("family") - families = subset_doc["data"].get("families") - if not product_type and families: - product_type = families[0] - + product_entity = self._src_product_entity + product_type = product_entity["productType"] if not product_type: self._status.set_failed( "Couldn't figure out product type from source product" @@ -826,12 +818,16 @@ class ProjectPushItemProcess: def _determine_product_name(self): product_type = self._product_type - asset_doc = self._asset_doc task_info = self._task_info + task_name = task_type = None + if task_info: + task_name = task_info["name"] + task_type = task_info["type"] + product_name = get_product_name( self._item.dst_project_name, - asset_doc, - task_info.get("name"), + task_name, + task_type, self.host_name, product_type, self._item.variant, @@ -842,91 +838,103 @@ class ProjectPushItemProcess: ) self._product_name = product_name - def _make_sure_subset_exists(self): + def _make_sure_product_exists(self): project_name = self._item.dst_project_name - asset_id = self._asset_doc["_id"] + folder_id = self._folder_entity["id"] product_name = self._product_name product_type = self._product_type - subset_doc = get_subset_by_name(project_name, product_name, asset_id) - if subset_doc: - self._subset_doc = subset_doc - return subset_doc - - data = { - "families": [product_type] - } - subset_doc = new_subset_document( - product_name, product_type, asset_id, data + product_entity = ayon_api.get_product_by_name( + project_name, product_name, folder_id ) - self._operations.create_entity(project_name, "subset", subset_doc) - self._subset_doc = subset_doc + if product_entity: + self._product_entity = product_entity + return product_entity + + product_entity = new_product_entity( + product_name, + product_type, + folder_id, + ) + self._operations.create_entity( + project_name, "product", product_entity + ) + self._product_entity = product_entity def _make_sure_version_exists(self): """Make sure version document exits in database.""" project_name = self._item.dst_project_name version = self._item.dst_version - src_version_doc = self._src_version_doc - subset_doc = self._subset_doc - product_id = subset_doc["_id"] - src_data = src_version_doc["data"] - families = subset_doc["data"].get("families") - if not families: - families = [subset_doc["data"]["family"]] + src_version_entity = self._src_version_entity + product_entity = self._product_entity + product_id = product_entity["id"] + product_type = product_entity["productType"] + src_attrib = 
src_version_entity["attrib"] + + dst_attrib = {} + for key in { + "productType", + "productTypes", + "families", + "fps", + "pixelAspect", + "clipIn", + "clipOut", + "frameStart", + "frameEnd", + "handleStart", + "handleEnd", + "resolutionWidth", + "resolutionHeight", + "colorSpace", + "source", + "comment", + "description", + "intent", + }: + if key in src_attrib: + dst_attrib[key] = src_attrib[key] - version_data = { - "families": list(families), - "fps": src_data.get("fps"), - "source": src_data.get("source"), - "machine": socket.gethostname(), - "comment": self._item.comment or "", - "author": get_ayon_username(), - "time": get_formatted_current_time(), - } if version is None: - last_version_doc = get_last_version_by_subset_id( + last_version_entity = ayon_api.get_last_version_by_product_id( project_name, product_id ) - if last_version_doc: - version = int(last_version_doc["name"]) + 1 + if last_version_entity: + version = int(last_version_entity["version"]) + 1 else: version = get_versioning_start( project_name, self.host_name, task_name=self._task_info["name"], task_type=self._task_info["type"], - product_type=families[0], - product_name=subset_doc["name"] + product_type=product_type, + product_name=product_entity["name"] ) - existing_version_doc = get_version_by_name( + existing_version_entity = ayon_api.get_version_by_name( project_name, version, product_id ) # Update existing version - if existing_version_doc: - version_doc = new_version_doc( - version, product_id, version_data, existing_version_doc["_id"] + if existing_version_entity: + self._operations.update_entity( + project_name, + "version", + existing_version_entity["id"], + {"attrib": dst_attrib} ) - update_data = prepare_version_update_data( - existing_version_doc, version_doc - ) - if update_data: - self._operations.update_entity( - project_name, - "version", - existing_version_doc["_id"], - update_data - ) - self._version_doc = version_doc - + existing_version_entity["attrib"].update(dst_attrib) + self._version_entity = existing_version_entity return - version_doc = new_version_doc( - version, product_id, version_data + version_entity = new_version_entity( + version, + product_id, + attribs=dst_attrib, ) - self._operations.create_entity(project_name, "version", version_doc) - - self._version_doc = version_doc + self._operations.create_entity( + project_name, "version", version_entity + ) + self._version_entity = version_entity def _integrate_representations(self): try: @@ -937,21 +945,21 @@ class ProjectPushItemProcess: raise def _real_integrate_representations(self): - version_doc = self._version_doc - version_id = version_doc["_id"] - existing_repres = get_representations( + version_entity = self._version_entity + version_id = version_entity["id"] + existing_repres = ayon_api.get_representations( self._item.dst_project_name, - version_ids=[version_id] + version_ids={version_id} ) existing_repres_by_low_name = { - repre_doc["name"].lower(): repre_doc - for repre_doc in existing_repres + repre_entity["name"].lower(): repre_entity + for repre_entity in existing_repres } template_name = self._template_name anatomy = self._anatomy formatting_data = get_template_data( - self._project_doc, - self._asset_doc, + self._project_entity, + self._folder_entity, self._task_info.get("name"), self.host_name ) @@ -962,15 +970,12 @@ class ProjectPushItemProcess: "name": self._product_name, "type": self._product_type, }, - "version": version_doc["name"] + "version": version_entity["version"] }) - path_template = 
anatomy.templates[template_name]["path"].replace( - "\\", "/" - ) - file_template = StringTemplate( - anatomy.templates[template_name]["file"] - ) + publish_template = anatomy.get_template_item("publish", template_name) + path_template = publish_template["path"].template.replace("\\", "/") + file_template = publish_template["file"] self._log_info("Preparing files to transfer") processed_repre_items = self._prepare_file_transactions( anatomy, template_name, formatting_data, file_template @@ -992,8 +997,8 @@ class ProjectPushItemProcess: ): processed_repre_items = [] for repre_item in self._src_repre_items: - repre_doc = repre_item.repre_doc - repre_name = repre_doc["name"] + repre_entity = repre_item.repre_entity + repre_name = repre_entity["name"] repre_format_data = copy.deepcopy(formatting_data) repre_format_data["representation"] = repre_name for src_file in repre_item.src_files: @@ -1002,11 +1007,13 @@ class ProjectPushItemProcess: break # Re-use 'output' from source representation - repre_output_name = repre_doc["context"].get("output") + repre_output_name = repre_entity["context"].get("output") if repre_output_name is not None: repre_format_data["output"] = repre_output_name - template_obj = anatomy.templates_obj[template_name]["folder"] + template_obj = anatomy.get_template_item( + "publish", template_name, "directory" + ) folder_path = template_obj.format_strict(formatting_data) repre_context = folder_path.used_values folder_path_rootless = folder_path.rootless @@ -1059,34 +1066,24 @@ class ProjectPushItemProcess: path_template, existing_repres_by_low_name ): - addons_manager = AddonsManager() - sync_server_module = addons_manager.get("sync_server") - if sync_server_module is None or not sync_server_module.enabled: - sites = [{ - "name": "studio", - "created_dt": datetime.datetime.now() - }] - else: - sites = sync_server_module.compute_resource_sync_sites( - project_name=self._item.dst_project_name - ) - added_repre_names = set() for item in processed_repre_items: (repre_item, repre_filepaths, repre_context, published_path) = item - repre_name = repre_item.repre_doc["name"] + repre_name = repre_item.repre_entity["name"] added_repre_names.add(repre_name.lower()) - new_repre_data = { + new_repre_attributes = { "path": published_path, "template": path_template } new_repre_files = [] for (path, rootless_path) in repre_filepaths: new_repre_files.append({ + "id": create_entity_id(), + "name": os.path.basename(rootless_path), "path": rootless_path, "size": os.path.getsize(path), "hash": source_hash(path), - "sites": sites + "hash_type": "op3", }) existing_repre = existing_repres_by_low_name.get( @@ -1094,41 +1091,51 @@ class ProjectPushItemProcess: ) entity_id = None if existing_repre: - entity_id = existing_repre["_id"] - new_repre_doc = new_representation_doc( + entity_id = existing_repre["id"] + repre_entity = new_representation_entity( repre_name, version_id, - repre_context, - data=new_repre_data, + new_repre_files, + data={"context": repre_context}, + attribs=new_repre_attributes, entity_id=entity_id ) - new_repre_doc["files"] = new_repre_files if not existing_repre: self._operations.create_entity( self._item.dst_project_name, - new_repre_doc["type"], - new_repre_doc + "representation", + repre_entity ) else: - update_data = prepare_representation_update_data( - existing_repre, new_repre_doc - ) - if update_data: + changes = {} + for key, value in repre_entity.items(): + if key == "attrib": + continue + if value != existing_repre.get(key): + changes[key] = value + attrib_changes = 
{} + for key, value in repre_entity["attrib"].items(): + if value != existing_repre["attrib"].get(key): + attrib_changes[key] = value + if attrib_changes: + changes["attrib"] = attrib_changes + + if changes: self._operations.update_entity( self._item.dst_project_name, - new_repre_doc["type"], - new_repre_doc["_id"], - update_data + "representation", + entity_id, + changes ) existing_repre_names = set(existing_repres_by_low_name.keys()) for repre_name in (existing_repre_names - added_repre_names): - repre_doc = existing_repres_by_low_name[repre_name] + repre_entity = existing_repres_by_low_name[repre_name] self._operations.update_entity( self._item.dst_project_name, - repre_doc["type"], - repre_doc["_id"], - {"type": "archived_representation"} + "representation", + repre_entity["id"], + {"active": False} ) diff --git a/client/ayon_core/tools/push_to_project/ui/window.py b/client/ayon_core/tools/push_to_project/ui/window.py index 4d39075dc3..4d64509afd 100644 --- a/client/ayon_core/tools/push_to_project/ui/window.py +++ b/client/ayon_core/tools/push_to_project/ui/window.py @@ -5,8 +5,6 @@ from ayon_core.tools.utils import ( PlaceholderLineEdit, SeparatorWidget, set_style_property, -) -from ayon_core.tools.ayon_utils.widgets import ( ProjectsCombobox, FoldersWidget, TasksWidget, @@ -158,7 +156,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): main_thread_timer.timeout.connect(self._on_main_thread_timer) show_timer.timeout.connect(self._on_show_timer) user_input_changed_timer.timeout.connect(self._on_user_input_timer) - folder_name_input.textChanged.connect(self._on_new_asset_change) + folder_name_input.textChanged.connect(self._on_new_folder_change) variant_input.textChanged.connect(self._on_variant_change) comment_input.textChanged.connect(self._on_comment_change) @@ -169,7 +167,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): controller.register_event_callback( "new_folder_name.changed", - self._on_controller_new_asset_change + self._on_controller_new_folder_change ) controller.register_event_callback( "variant.changed", self._on_controller_variant_change @@ -291,7 +289,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self.refresh() - def _on_new_asset_change(self, text): + def _on_new_folder_change(self, text): self._new_folder_name_input_text = text self._user_input_changed_timer.start() @@ -319,7 +317,7 @@ class PushToContextSelectWindow(QtWidgets.QWidget): self._comment_input_text = None self._controller.set_user_value_comment(comment) - def _on_controller_new_asset_change(self, event): + def _on_controller_new_folder_change(self, event): folder_name = event["new_folder_name"] if ( self._new_folder_name_input_text is None diff --git a/client/ayon_core/tools/pyblish_pype/util.py b/client/ayon_core/tools/pyblish_pype/util.py index 8126637060..09a370c6e4 100644 --- a/client/ayon_core/tools/pyblish_pype/util.py +++ b/client/ayon_core/tools/pyblish_pype/util.py @@ -7,8 +7,6 @@ from __future__ import ( import os import sys -import numbers -import copy import collections from qtpy import QtCore @@ -39,7 +37,7 @@ def defer(delay, func): This aids in keeping the GUI responsive, but complicates logic when producing tests. To combat this, the environment variable ensures - that every operation is synchonous. + that every operation is synchronous. 
Arguments: delay (float): Delay multiplier; default 1, 0 means no delay diff --git a/client/ayon_core/tools/sceneinventory/control.py b/client/ayon_core/tools/sceneinventory/control.py index 16b889e855..592113455c 100644 --- a/client/ayon_core/tools/sceneinventory/control.py +++ b/client/ayon_core/tools/sceneinventory/control.py @@ -6,7 +6,7 @@ from ayon_core.pipeline import ( registered_host, get_current_context, ) -from ayon_core.tools.ayon_utils.models import HierarchyModel +from ayon_core.tools.common_models import HierarchyModel from .models import SiteSyncModel @@ -28,7 +28,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model = SiteSyncModel(self) + self._sitesync_model = SiteSyncModel(self) # Switch dialog requirements self._hierarchy_model = HierarchyModel(self) self._event_system = self._create_event_system() @@ -47,7 +47,7 @@ class SceneInventoryController: self._current_folder_id = None self._current_folder_set = False - self._site_sync_model.reset() + self._sitesync_model.reset() self._hierarchy_model.reset() def get_current_context(self): @@ -89,22 +89,22 @@ class SceneInventoryController: return [] # Site Sync methods - def is_sync_server_enabled(self): - return self._site_sync_model.is_sync_server_enabled() + def is_sitesync_enabled(self): + return self._sitesync_model.is_sitesync_enabled() def get_sites_information(self): - return self._site_sync_model.get_sites_information() + return self._sitesync_model.get_sites_information() def get_site_provider_icons(self): - return self._site_sync_model.get_site_provider_icons() + return self._sitesync_model.get_site_provider_icons() def get_representations_site_progress(self, representation_ids): - return self._site_sync_model.get_representations_site_progress( + return self._sitesync_model.get_representations_site_progress( representation_ids ) def resync_representations(self, representation_ids, site_type): - return self._site_sync_model.resync_representations( + return self._sitesync_model.resync_representations( representation_ids, site_type ) diff --git a/client/ayon_core/tools/sceneinventory/delegates.py b/client/ayon_core/tools/sceneinventory/delegates.py index 1f8bb81835..2126fa1cbe 100644 --- a/client/ayon_core/tools/sceneinventory/delegates.py +++ b/client/ayon_core/tools/sceneinventory/delegates.py @@ -1,9 +1,7 @@ import numbers -from ayon_core.client import ( - get_versions, - get_hero_versions, -) +import ayon_api + from ayon_core.pipeline import HeroVersionType from ayon_core.tools.utils.models import TreeModel from ayon_core.tools.utils.lib import format_version @@ -27,7 +25,7 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate): def displayText(self, value, locale): if isinstance(value, HeroVersionType): - return format_version(value, True) + return format_version(value) if not isinstance(value, numbers.Integral): # For cases where no version is resolved like NOT FOUND cases # where a representation might not exist in current database @@ -113,71 +111,35 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate): # Current value of the index item = index.data(TreeModel.ItemRole) value = index.data(QtCore.Qt.DisplayRole) - if item["version_document"]["type"] != "hero_version": - assert isinstance(value, numbers.Integral), ( - "Version is not integer" - ) project_name = self.get_project_name() # Add all available versions to the editor - parent_id = item["version_document"]["parent"] - version_docs = [ - version_doc - for version_doc in sorted( - 
get_versions(project_name, subset_ids=[parent_id]), - key=lambda item: item["name"] - ) - if version_doc["data"].get("active", True) - ] - - hero_versions = list( - get_hero_versions( - project_name, - subset_ids=[parent_id], - fields=["name", "data.tags", "version_id"] - ) - ) - hero_version_doc = None - if hero_versions: - hero_version_doc = hero_versions[0] - - doc_for_hero_version = None + product_id = item["version_entity"]["productId"] + version_entities = list(sorted( + ayon_api.get_versions( + project_name, product_ids={product_id}, active=True + ), + key=lambda item: abs(item["version"]) + )) selected = None items = [] - for version_doc in version_docs: - version_tags = version_doc["data"].get("tags") or [] - if "deleted" in version_tags: - continue + is_hero_version = value < 0 + for version_entity in version_entities: + version = version_entity["version"] + label = format_version(version) + item = QtGui.QStandardItem(label) + item.setData(version_entity, QtCore.Qt.UserRole) + items.append(item) if ( - hero_version_doc - and doc_for_hero_version is None - and hero_version_doc["version_id"] == version_doc["_id"] + version == value + or is_hero_version and version < 0 ): - doc_for_hero_version = version_doc - - label = format_version(version_doc["name"]) - item = QtGui.QStandardItem(label) - item.setData(version_doc, QtCore.Qt.UserRole) - items.append(item) - - if version_doc["name"] == value: selected = item - if hero_version_doc and doc_for_hero_version: - version_name = doc_for_hero_version["name"] - label = format_version(version_name, True) - if isinstance(value, HeroVersionType): - index = len(version_docs) - hero_version_doc["name"] = HeroVersionType(version_name) - - item = QtGui.QStandardItem(label) - item.setData(hero_version_doc, QtCore.Qt.UserRole) - items.append(item) - # Reverse items so latest versions be upper - items = list(reversed(items)) + items.reverse() for item in items: editor.model().appendRow(item) diff --git a/client/ayon_core/tools/sceneinventory/model.py b/client/ayon_core/tools/sceneinventory/model.py index 18fc56db0b..330b174218 100644 --- a/client/ayon_core/tools/sceneinventory/model.py +++ b/client/ayon_core/tools/sceneinventory/model.py @@ -1,29 +1,20 @@ -import collections import re import logging import uuid -import copy from collections import defaultdict +import ayon_api from qtpy import QtCore, QtGui import qtawesome -from ayon_core.client import ( - get_assets, - get_subsets, - get_versions, - get_last_version_by_subset_id, - get_representations, -) from ayon_core.pipeline import ( get_current_project_name, - schema, HeroVersionType, ) from ayon_core.style import get_default_entity_icon_color +from ayon_core.tools.utils import get_qt_icon from ayon_core.tools.utils.models import TreeModel, Item -from ayon_core.tools.ayon_utils.widgets import get_qt_icon def walk_hierarchy(node): @@ -77,13 +68,7 @@ class InventoryModel(TreeModel): } def outdated(self, item): - value = item.get("version") - if isinstance(value, HeroVersionType): - return False - - if item.get("version") == item.get("highest_version"): - return False - return True + return item.get("isOutdated", True) def data(self, index, role): if not index.isValid(): @@ -249,29 +234,29 @@ class InventoryModel(TreeModel): not_found_ids.append(repre_id) continue - version = versions_by_id.get(representation["parent"]) - if not version: + version_entity = versions_by_id.get(representation["versionId"]) + if not version_entity: not_found["version"].extend(group_containers) 
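A note on the delegate hunk above: AYON encodes hero versions as negative version numbers, which is why the editor sorts by `abs(item["version"])` and treats `value < 0` as a hero selection. A small self-contained illustration of the convention (not ayon-core's implementation):

```python
def describe_version(version):
    # Hero versions are stored as negative numbers in AYON.
    if version < 0:
        return "hero -> v{:03d}".format(abs(version))
    return "v{:03d}".format(version)

# Sorting by abs() keeps a hero version next to the version it points at,
# matching sorted(..., key=lambda item: abs(item["version"])) above.
for version in sorted([1, 2, -2, 3], key=abs):
    print(describe_version(version))
```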
not_found_ids.append(repre_id) continue - product = products_by_id.get(version["parent"]) - if not product: + product_entity = products_by_id.get(version_entity["productId"]) + if not product_entity: not_found["product"].extend(group_containers) not_found_ids.append(repre_id) continue - folder = folders_by_id.get(product["parent"]) - if not folder: + folder_entity = folders_by_id.get(product_entity["folderId"]) + if not folder_entity: not_found["folder"].extend(group_containers) not_found_ids.append(repre_id) continue group_dict.update({ "representation": representation, - "version": version, - "subset": product, - "asset": folder + "version": version_entity, + "product": product_entity, + "folder": folder_entity }) for _repre_id in not_found_ids: @@ -306,45 +291,60 @@ class InventoryModel(TreeModel): ) sites_info = self._controller.get_sites_information() + # Query the highest available version so the model can know + # whether current version is currently up-to-date. + highest_version_by_product_id = ayon_api.get_last_versions( + project_name, + product_ids={ + group["version"]["productId"] for group in grouped.values() + }, + fields={"productId", "version"} + ) + # Map value to `version` key + highest_version_by_product_id = { + product_id: version["version"] + for product_id, version in highest_version_by_product_id.items() + } + for repre_id, group_dict in sorted(grouped.items()): group_containers = group_dict["containers"] - representation = group_dict["representation"] - version = group_dict["version"] - subset = group_dict["subset"] - asset = group_dict["asset"] + repre_entity = group_dict["representation"] + version_entity = group_dict["version"] + folder_entity = group_dict["folder"] + product_entity = group_dict["product"] - # Get product type - maj_version, _ = schema.get_schema_version(subset["schema"]) - if maj_version < 3: - src_doc = version - else: - src_doc = subset - - product_type = src_doc["data"].get("family") - if not product_type: - families = src_doc["data"].get("families") - if families: - product_type = families[0] - - # Store the highest available version so the model can know - # whether current version is currently up-to-date. - highest_version = get_last_version_by_subset_id( - project_name, version["parent"] - ) + product_type = product_entity["productType"] # create the group header group_node = Item() group_node["Name"] = "{}_{}: ({})".format( - asset["name"], subset["name"], representation["name"] + folder_entity["name"], + product_entity["name"], + repre_entity["name"] ) group_node["representation"] = repre_id - group_node["version"] = version["name"] - group_node["highest_version"] = highest_version["name"] + + # Detect hero version type + version = version_entity["version"] + if version < 0: + version = HeroVersionType(version) + group_node["version"] = version + + # Check if the version is outdated. + # Hero versions are never considered to be outdated. 
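The outdated check being introduced here reduces to one batched query per refresh. A minimal sketch, assuming a configured server connection; the project name and product id below are hypothetical. `ayon_api.get_last_versions` returns a mapping of product id to its latest version entity:

```python
import ayon_api

project_name = "demo_project"                         # hypothetical
product_ids = {"0123456789abcdef0123456789abcdef"}    # hypothetical

last_version_by_product_id = {
    product_id: entity["version"]
    for product_id, entity in ayon_api.get_last_versions(
        project_name, product_ids=product_ids, fields={"productId", "version"}
    ).items()
}

def is_outdated(version_entity):
    # Hero versions (negative numbers) are never flagged as outdated.
    if version_entity["version"] < 0:
        return False
    last = last_version_by_product_id.get(version_entity["productId"])
    return last is not None and version_entity["version"] != last
```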
+ is_outdated = False + if not isinstance(version, HeroVersionType): + last_version = highest_version_by_product_id.get( + version_entity["productId"]) + if last_version is not None: + is_outdated = version_entity["version"] != last_version + group_node["isOutdated"] = is_outdated + group_node["productType"] = product_type or "" group_node["productTypeIcon"] = product_type_icon group_node["count"] = len(group_containers) group_node["isGroupNode"] = True - group_node["group"] = subset["data"].get("subsetGroup") + group_node["group"] = product_entity["attrib"].get("productGroup") # Site sync specific data progress = progress_by_id[repre_id] @@ -359,7 +359,8 @@ class InventoryModel(TreeModel): item_node.update(container) # store the current version on the item - item_node["version"] = version["name"] + item_node["version"] = version_entity["version"] + item_node["version_entity"] = version_entity # Remapping namespace to item name. # Noted that the name key is capital "N", by doing this, we @@ -404,73 +405,50 @@ class InventoryModel(TreeModel): if not filtered_repre_ids: return output - repre_docs = get_representations(project_name, repre_ids) + repre_entities = ayon_api.get_representations(project_name, repre_ids) repres_by_id.update({ - repre_doc["_id"]: repre_doc - for repre_doc in repre_docs + repre_entity["id"]: repre_entity + for repre_entity in repre_entities }) version_ids = { - repre_doc["parent"] for repre_doc in repres_by_id.values() + repre_entity["versionId"] + for repre_entity in repres_by_id.values() } if not version_ids: return output - version_docs = get_versions(project_name, version_ids, hero=True) versions_by_id.update({ - version_doc["_id"]: version_doc - for version_doc in version_docs - }) - hero_versions_by_subversion_id = collections.defaultdict(list) - for version_doc in versions_by_id.values(): - if version_doc["type"] != "hero_version": - continue - subversion = version_doc["version_id"] - hero_versions_by_subversion_id[subversion].append(version_doc) - - if hero_versions_by_subversion_id: - subversion_ids = set( - hero_versions_by_subversion_id.keys() + version_entity["id"]: version_entity + for version_entity in ayon_api.get_versions( + project_name, version_ids=version_ids ) - subversion_docs = get_versions(project_name, subversion_ids) - for subversion_doc in subversion_docs: - subversion_id = subversion_doc["_id"] - subversion_ids.discard(subversion_id) - h_version_docs = hero_versions_by_subversion_id[subversion_id] - for version_doc in h_version_docs: - version_doc["name"] = HeroVersionType( - subversion_doc["name"] - ) - version_doc["data"] = copy.deepcopy( - subversion_doc["data"] - ) - - for subversion_id in subversion_ids: - h_version_docs = hero_versions_by_subversion_id[subversion_id] - for version_doc in h_version_docs: - versions_by_id.pop(version_doc["_id"]) + }) product_ids = { - version_doc["parent"] - for version_doc in versions_by_id.values() + version_entity["productId"] + for version_entity in versions_by_id.values() } if not product_ids: return output - product_docs = get_subsets(project_name, product_ids) + products_by_id.update({ - product_doc["_id"]: product_doc - for product_doc in product_docs + product_entity["id"]: product_entity + for product_entity in ayon_api.get_products( + project_name, product_ids=product_ids + ) }) folder_ids = { - product_doc["parent"] - for product_doc in products_by_id.values() + product_entity["folderId"] + for product_entity in products_by_id.values() } if not folder_ids: return output - folder_docs = 
get_assets(project_name, folder_ids) folders_by_id.update({ - folder_doc["_id"]: folder_doc - for folder_doc in folder_docs + folder_entity["id"]: folder_entity + for folder_entity in ayon_api.get_folders( + project_name, folder_ids=folder_ids + ) }) return output @@ -530,27 +508,11 @@ class FilterProxyModel(QtCore.QSortFilterProxyModel): def _is_outdated(self, row, parent): """Return whether row is outdated. - A row is considered outdated if it has "version" and "highest_version" - data and in the internal data structure, and they are not of an - equal value. + A row is considered outdated if `isOutdated` data is true or not set. """ def outdated(node): - version = node.get("version", None) - highest = node.get("highest_version", None) - - # Always allow indices that have no version data at all - if version is None and highest is None: - return True - - # If either a version or highest is present but not the other - # consider the item invalid. - if not self._hierarchy_view: - # Skip this check if in hierarchy view, or the child item - # node will be hidden even it's actually outdated. - if version is None or highest is None: - return False - return version != highest + return node.get("isOutdated", True) index = self.sourceModel().index(row, self.filterKeyColumn(), parent) diff --git a/client/ayon_core/tools/sceneinventory/models/__init__.py b/client/ayon_core/tools/sceneinventory/models/__init__.py index c861d3c1a0..f840a45aa8 100644 --- a/client/ayon_core/tools/sceneinventory/models/__init__.py +++ b/client/ayon_core/tools/sceneinventory/models/__init__.py @@ -1,4 +1,4 @@ -from .site_sync import SiteSyncModel +from .sitesync import SiteSyncModel __all__ = ( diff --git a/client/ayon_core/tools/sceneinventory/models/site_sync.py b/client/ayon_core/tools/sceneinventory/models/sitesync.py similarity index 70% rename from client/ayon_core/tools/sceneinventory/models/site_sync.py rename to client/ayon_core/tools/sceneinventory/models/sitesync.py index c7bc0b756d..1a1f08bf02 100644 --- a/client/ayon_core/tools/sceneinventory/models/site_sync.py +++ b/client/ayon_core/tools/sceneinventory/models/sitesync.py @@ -1,4 +1,5 @@ -from ayon_core.client import get_representations +import ayon_api + from ayon_core.addon import AddonsManager NOT_SET = object() @@ -8,30 +9,30 @@ class SiteSyncModel: def __init__(self, controller): self._controller = controller - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET def reset(self): - self._sync_server_module = NOT_SET - self._sync_server_enabled = None + self._sitesync_addon = NOT_SET + self._sitesync_enabled = None self._active_site = NOT_SET self._remote_site = NOT_SET self._active_site_provider = NOT_SET self._remote_site_provider = NOT_SET - def is_sync_server_enabled(self): + def is_sitesync_enabled(self): """Site sync is enabled. Returns: bool: Is enabled or not. """ - self._cache_sync_server_module() - return self._sync_server_enabled + self._cache_sitesync_addon() + return self._sitesync_enabled def get_site_provider_icons(self): """Icon paths per provider. @@ -40,10 +41,10 @@ class SiteSyncModel: dict[str, str]: Path by provider name. 
""" - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return {} - site_sync_addon = self._get_sync_server_module() - return site_sync_addon.get_site_icons() + sitesync_addon = self._get_sitesync_addon() + return sitesync_addon.get_site_icons() def get_sites_information(self): return { @@ -64,19 +65,21 @@ class SiteSyncModel: } for repre_id in representation_ids } - if not self.is_sync_server_enabled(): + if not self.is_sitesync_enabled(): return output project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() - repre_docs = get_representations(project_name, representation_ids) + sitesync_addon = self._get_sitesync_addon() + repre_entities = ayon_api.get_representations( + project_name, representation_ids + ) active_site = self._get_active_site() remote_site = self._get_remote_site() - for repre_doc in repre_docs: - repre_output = output[repre_doc["_id"]] - result = site_sync.get_progress_for_repre( - repre_doc, active_site, remote_site + for repre_entity in repre_entities: + repre_output = output[repre_entity["id"]] + result = sitesync_addon.get_progress_for_repre( + repre_entity, active_site, remote_site ) repre_output["active_site"] = result[active_site] repre_output["remote_site"] = result[remote_site] @@ -92,7 +95,7 @@ class SiteSyncModel: """ project_name = self._controller.get_current_project_name() - site_sync = self._get_sync_server_module() + sitesync_addon = self._get_sitesync_addon() active_site = self._get_active_site() remote_site = self._get_remote_site() progress = self.get_representations_site_progress( @@ -112,22 +115,22 @@ class SiteSyncModel: site = remote_site if check_progress == 1: - site_sync.add_site( + sitesync_addon.add_site( project_name, repre_id, site, force=True ) - def _get_sync_server_module(self): - self._cache_sync_server_module() - return self._sync_server_module + def _get_sitesync_addon(self): + self._cache_sitesync_addon() + return self._sitesync_addon - def _cache_sync_server_module(self): - if self._sync_server_module is not NOT_SET: - return self._sync_server_module + def _cache_sitesync_addon(self): + if self._sitesync_addon is not NOT_SET: + return self._sitesync_addon manager = AddonsManager() - site_sync = manager.get("sync_server") - sync_enabled = site_sync is not None and site_sync.enabled - self._sync_server_module = site_sync - self._sync_server_enabled = sync_enabled + sitesync_addon = manager.get("sitesync") + sync_enabled = sitesync_addon is not None and sitesync_addon.enabled + self._sitesync_addon = sitesync_addon + self._sitesync_enabled = sync_enabled def _get_active_site(self): if self._active_site is NOT_SET: @@ -154,19 +157,19 @@ class SiteSyncModel: remote_site = None active_site_provider = None remote_site_provider = None - if self.is_sync_server_enabled(): - site_sync = self._get_sync_server_module() + if self.is_sitesync_enabled(): + sitesync_addon = self._get_sitesync_addon() project_name = self._controller.get_current_project_name() - active_site = site_sync.get_active_site(project_name) - remote_site = site_sync.get_remote_site(project_name) + active_site = sitesync_addon.get_active_site(project_name) + remote_site = sitesync_addon.get_remote_site(project_name) active_site_provider = "studio" remote_site_provider = "studio" if active_site != "studio": - active_site_provider = site_sync.get_provider_for_site( + active_site_provider = sitesync_addon.get_provider_for_site( project_name, active_site ) if remote_site != "studio": - remote_site_provider = 
site_sync.get_provider_for_site( + remote_site_provider = sitesync_addon.get_provider_for_site( project_name, remote_site ) diff --git a/client/ayon_core/tools/sceneinventory/switch_dialog/dialog.py b/client/ayon_core/tools/sceneinventory/switch_dialog/dialog.py index 89c3b652e1..4977ad13c6 100644 --- a/client/ayon_core/tools/sceneinventory/switch_dialog/dialog.py +++ b/client/ayon_core/tools/sceneinventory/switch_dialog/dialog.py @@ -1,18 +1,10 @@ import collections import logging +import ayon_api from qtpy import QtWidgets, QtCore import qtawesome -from ayon_core.client import ( - get_assets, - get_subset_by_name, - get_subsets, - get_versions, - get_hero_versions, - get_last_versions, - get_representations, -) from ayon_core.pipeline.load import ( discover_loader_plugins, switch_container, @@ -135,16 +127,16 @@ class SwitchAssetDialog(QtWidgets.QDialog): # first asset field, this also allows to see the placeholder value. accept_btn.setFocus() - self._folder_docs_by_id = {} - self._product_docs_by_id = {} - self._version_docs_by_id = {} - self._repre_docs_by_id = {} + self._folder_entities_by_id = {} + self._product_entities_by_id = {} + self._version_entities_by_id = {} + self._repre_entities_by_id = {} self._missing_folder_ids = set() self._missing_product_ids = set() self._missing_version_ids = set() self._missing_repre_ids = set() - self._missing_docs = False + self._missing_entities = False self._inactive_folder_ids = set() self._inactive_product_ids = set() @@ -245,10 +237,10 @@ class SwitchAssetDialog(QtWidgets.QDialog): def find_last_versions(self, product_ids): project_name = self._project_name - return get_last_versions( + return ayon_api.get_last_versions( project_name, - subset_ids=product_ids, - fields=["_id", "parent", "type"] + product_ids, + fields={"id", "productId", "version"} ) def _on_show_timer(self): @@ -265,124 +257,119 @@ class SwitchAssetDialog(QtWidgets.QDialog): } project_name = self._project_name - repres = list(get_representations( + repre_entities = list(ayon_api.get_representations( project_name, representation_ids=repre_ids, - archived=True, )) - repres_by_id = {str(repre["_id"]): repre for repre in repres} + repres_by_id = {r["id"]: r for r in repre_entities} - content_repre_docs_by_id = {} + content_repre_entities_by_id = {} inactive_repre_ids = set() missing_repre_ids = set() version_ids = set() for repre_id in repre_ids: - repre_doc = repres_by_id.get(repre_id) - if repre_doc is None: + repre_entity = repres_by_id.get(repre_id) + if repre_entity is None: missing_repre_ids.add(repre_id) - elif repres_by_id[repre_id]["type"] == "archived_representation": + elif not repres_by_id[repre_id]["active"]: inactive_repre_ids.add(repre_id) - version_ids.add(repre_doc["parent"]) + version_ids.add(repre_entity["versionId"]) else: - content_repre_docs_by_id[repre_id] = repre_doc - version_ids.add(repre_doc["parent"]) + content_repre_entities_by_id[repre_id] = repre_entity + version_ids.add(repre_entity["versionId"]) - version_docs = get_versions( + version_entities = ayon_api.get_versions( project_name, - version_ids=version_ids, - hero=True + version_ids=version_ids ) - content_version_docs_by_id = {} - for version_doc in version_docs: - version_id = version_doc["_id"] - content_version_docs_by_id[version_id] = version_doc + content_version_entities_by_id = {} + for version_entity in version_entities: + version_id = version_entity["id"] + content_version_entities_by_id[version_id] = version_entity missing_version_ids = set() product_ids = set() for version_id in 
version_ids: - version_doc = content_version_docs_by_id.get(version_id) - if version_doc is None: + version_entity = content_version_entities_by_id.get(version_id) + if version_entity is None: missing_version_ids.add(version_id) else: - product_ids.add(version_doc["parent"]) + product_ids.add(version_entity["productId"]) - product_docs = get_subsets( - project_name, subset_ids=product_ids, archived=True + product_entities = ayon_api.get_products( + project_name, product_ids=product_ids ) - product_docs_by_id = {sub["_id"]: sub for sub in product_docs} + product_entities_by_id = {p["id"]: p for p in product_entities} folder_ids = set() inactive_product_ids = set() missing_product_ids = set() - content_product_docs_by_id = {} + content_product_entities_by_id = {} for product_id in product_ids: - product_doc = product_docs_by_id.get(product_id) - if product_doc is None: + product_entity = product_entities_by_id.get(product_id) + if product_entity is None: missing_product_ids.add(product_id) - elif product_doc["type"] == "archived_subset": - folder_ids.add(product_doc["parent"]) - inactive_product_ids.add(product_id) else: - folder_ids.add(product_doc["parent"]) - content_product_docs_by_id[product_id] = product_doc + folder_ids.add(product_entity["folderId"]) + content_product_entities_by_id[product_id] = product_entity - folder_docs = get_assets( - project_name, asset_ids=folder_ids, archived=True + folder_entities = ayon_api.get_folders( + project_name, folder_ids=folder_ids, active=None ) - folder_docs_by_id = { - folder_doc["_id"]: folder_doc - for folder_doc in folder_docs + folder_entities_by_id = { + folder_entity["id"]: folder_entity + for folder_entity in folder_entities } missing_folder_ids = set() inactive_folder_ids = set() - content_folder_docs_by_id = {} + content_folder_entities_by_id = {} for folder_id in folder_ids: - folder_doc = folder_docs_by_id.get(folder_id) - if folder_doc is None: + folder_entity = folder_entities_by_id.get(folder_id) + if folder_entity is None: missing_folder_ids.add(folder_id) - elif folder_doc["type"] == "archived_asset": + elif not folder_entity["active"]: inactive_folder_ids.add(folder_id) else: - content_folder_docs_by_id[folder_id] = folder_doc + content_folder_entities_by_id[folder_id] = folder_entity # stash context values, works only for single representation init_folder_id = None init_product_name = None init_repre_name = None - if len(repres) == 1: - init_repre_doc = repres[0] - init_version_doc = content_version_docs_by_id.get( - init_repre_doc["parent"]) - init_product_doc = None - init_folder_doc = None - if init_version_doc: - init_product_doc = content_product_docs_by_id.get( - init_version_doc["parent"] + if len(repre_entities) == 1: + init_repre_entity = repre_entities[0] + init_version_entity = content_version_entities_by_id.get( + init_repre_entity["versionId"]) + init_product_entity = None + init_folder_entity = None + if init_version_entity: + init_product_entity = content_product_entities_by_id.get( + init_version_entity["productId"] ) - if init_product_doc: - init_folder_doc = content_folder_docs_by_id.get( - init_product_doc["parent"] + if init_product_entity: + init_folder_entity = content_folder_entities_by_id.get( + init_product_entity["folderId"] ) - if init_folder_doc: - init_repre_name = init_repre_doc["name"] - init_product_name = init_product_doc["name"] - init_folder_id = init_folder_doc["_id"] + if init_folder_entity: + init_repre_name = init_repre_entity["name"] + init_product_name = init_product_entity["name"] + 
init_folder_id = init_folder_entity["id"] self._init_folder_id = init_folder_id self._init_product_name = init_product_name self._init_repre_name = init_repre_name - self._folder_docs_by_id = content_folder_docs_by_id - self._product_docs_by_id = content_product_docs_by_id - self._version_docs_by_id = content_version_docs_by_id - self._repre_docs_by_id = content_repre_docs_by_id + self._folder_entities_by_id = content_folder_entities_by_id + self._product_entities_by_id = content_product_entities_by_id + self._version_entities_by_id = content_version_entities_by_id + self._repre_entities_by_id = content_repre_entities_by_id self._missing_folder_ids = missing_folder_ids self._missing_product_ids = missing_product_ids self._missing_version_ids = missing_version_ids self._missing_repre_ids = missing_repre_ids - self._missing_docs = ( + self._missing_entities = ( bool(missing_folder_ids) or bool(missing_version_ids) or bool(missing_product_ids) @@ -524,7 +511,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): and not selected_product_name and not selected_repre ): - return list(self._repre_docs_by_id.keys()) + return list(self._repre_entities_by_id.keys()) # Everything is selected # [x] [x] [x] @@ -571,68 +558,68 @@ class SwitchAssetDialog(QtWidgets.QDialog): self, folder_id, selected_product_name, selected_repre ): project_name = self._project_name - product_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, selected_product_name, folder_id, - fields=["_id"] + fields={"id"} ) - product_id = product_doc["_id"] + product_id = product_entity["id"] last_versions_by_product_id = self.find_last_versions([product_id]) - version_doc = last_versions_by_product_id.get(product_id) - if not version_doc: + version_entity = last_versions_by_product_id.get(product_id) + if not version_entity: return [] - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, - version_ids=[version_doc["_id"]], - representation_names=[selected_repre], - fields=["_id"] + version_ids={version_entity["id"]}, + representation_names={selected_repre}, + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_xxo(self, folder_id, product_name): project_name = self._project_name - product_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, product_name, folder_id, - fields=["_id"] + fields={"id"} ) - if not product_doc: + if not product_entity: return [] repre_names = set() - for repre_doc in self._repre_docs_by_id.values(): - repre_names.add(repre_doc["name"]) + for repre_entity in self._repre_entities_by_id.values(): + repre_names.add(repre_entity["name"]) # TODO where to take version ids? 
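For orientation in the switch-dialog methods above and below: every `_get_current_output_repre_ids_*` variant resolves the same three-step chain of product by name, its last version, then representations. A minimal sketch under the same assumptions (configured server connection; the names and ids below are hypothetical):

```python
import ayon_api

project_name = "demo_project"                     # hypothetical
folder_id = "0123456789abcdef0123456789abcdef"    # hypothetical
product_name = "modelMain"                        # hypothetical
repre_name = "abc"                                # hypothetical

product = ayon_api.get_product_by_name(
    project_name, product_name, folder_id, fields={"id"}
)
repre_ids = set()
if product:
    last_versions = ayon_api.get_last_versions(
        project_name, [product["id"]], fields={"id", "productId", "version"}
    )
    last_version = last_versions.get(product["id"])
    if last_version:
        repre_ids = {
            repre["id"]
            for repre in ayon_api.get_representations(
                project_name,
                version_ids={last_version["id"]},
                representation_names={repre_name},
                fields={"id"},
            )
        }
```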
version_ids = [] - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_names=repre_names, version_ids=version_ids, - fields=["_id"] + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_xox(self, folder_id, selected_repre): product_names = { - product_doc["name"] - for product_doc in self._product_docs_by_id.values() + product_entity["name"] + for product_entity in self._product_entities_by_id.values() } project_name = self._project_name - product_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=[folder_id], - subset_names=product_names, - fields=["_id", "name"] + folder_ids=[folder_id], + product_names=product_names, + fields={"id", "name"} ) product_name_by_id = { - product_doc["_id"]: product_doc["name"] - for product_doc in product_docs + product_entity["id"]: product_entity["name"] + for product_entity in product_entities } product_ids = list(product_name_by_id.keys()) last_versions_by_product_id = self.find_last_versions(product_ids) @@ -640,35 +627,37 @@ class SwitchAssetDialog(QtWidgets.QDialog): for product_id, last_version in last_versions_by_product_id.items(): product_name = product_name_by_id[product_id] last_version_id_by_product_name[product_name] = ( - last_version["_id"] + last_version["id"] ) - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=last_version_id_by_product_name.values(), - representation_names=[selected_repre], - fields=["_id"] + representation_names={selected_repre}, + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_xoo(self, folder_id): project_name = self._project_name repres_by_product_name = collections.defaultdict(set) - for repre_doc in self._repre_docs_by_id.values(): - version_doc = self._version_docs_by_id[repre_doc["parent"]] - product_doc = self._product_docs_by_id[version_doc["parent"]] - product_name = product_doc["name"] - repres_by_product_name[product_name].add(repre_doc["name"]) + for repre_entity in self._repre_entities_by_id.values(): + version_id = repre_entity["versionId"] + version_entity = self._version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = self._product_entities_by_id[product_id] + product_name = product_entity["name"] + repres_by_product_name[product_name].add(repre_entity["name"]) - product_docs = list(get_subsets( + product_entities = list(ayon_api.get_products( project_name, - asset_ids=[folder_id], - subset_names=repres_by_product_name.keys(), - fields=["_id", "name"] + folder_ids=[folder_id], + product_names=repres_by_product_name.keys(), + fields={"id", "name"} )) product_name_by_id = { - product_doc["_id"]: product_doc["name"] - for product_doc in product_docs + product_entity["id"]: product_entity["name"] + for product_entity in product_entities } product_ids = list(product_name_by_id.keys()) last_versions_by_product_id = self.find_last_versions(product_ids) @@ -676,7 +665,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): for product_id, last_version in last_versions_by_product_id.items(): product_name = product_name_by_id[product_id] last_version_id_by_product_name[product_name] = ( - last_version["_id"] + last_version["id"] ) repre_names_by_version_id = {} @@ -686,97 +675,103 @@ 
class SwitchAssetDialog(QtWidgets.QDialog): if version_id is not None: repre_names_by_version_id[version_id] = list(repre_names) - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, names_by_version_ids=repre_names_by_version_id, - fields=["_id"] + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_oxx( self, product_name, selected_repre ): project_name = self._project_name - product_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=self._folder_docs_by_id.keys(), - subset_names=[product_name], - fields=["_id"] + folder_ids=self._folder_entities_by_id.keys(), + product_names=[product_name], + fields={"id"} ) - product_ids = [product_doc["_id"] for product_doc in product_docs] + product_ids = { + product_entity["id"] for product_entity in product_entities + } last_versions_by_product_id = self.find_last_versions(product_ids) - last_version_ids = [ - last_version["_id"] + last_version_ids = { + last_version["id"] for last_version in last_versions_by_product_id.values() - ] - repre_docs = get_representations( + } + + repre_entities = ayon_api.get_representations( project_name, version_ids=last_version_ids, - representation_names=[selected_repre], - fields=["_id"] + representation_names={selected_repre}, + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_oxo(self, product_name): project_name = self._project_name - product_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=self._folder_docs_by_id.keys(), - subset_names=[product_name], - fields=["_id", "parent"] + folder_ids=self._folder_entities_by_id.keys(), + product_names={product_name}, + fields={"id", "folderId"} ) - product_docs_by_id = { - product_doc["_id"]: product_doc - for product_doc in product_docs + product_entities_by_id = { + product_entity["id"]: product_entity + for product_entity in product_entities } - if not product_docs: + if not product_entities_by_id: return list() last_versions_by_product_id = self.find_last_versions( - product_docs_by_id.keys() + product_entities_by_id.keys() ) product_id_by_version_id = {} for product_id, last_version in last_versions_by_product_id.items(): - version_id = last_version["_id"] + version_id = last_version["id"] product_id_by_version_id[version_id] = product_id if not product_id_by_version_id: return list() repre_names_by_folder_id = collections.defaultdict(set) - for repre_doc in self._repre_docs_by_id.values(): - version_doc = self._version_docs_by_id[repre_doc["parent"]] - product_doc = self._product_docs_by_id[version_doc["parent"]] - folder_doc = self._folder_docs_by_id[product_doc["parent"]] - folder_id = folder_doc["_id"] - repre_names_by_folder_id[folder_id].add(repre_doc["name"]) + for repre_entity in self._repre_entities_by_id.values(): + version_id = repre_entity["versionId"] + version_entity = self._version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = self._product_entities_by_id[product_id] + folder_id = product_entity["folderId"] + folder_entity = self._folder_entities_by_id[folder_id] + folder_id = folder_entity["id"] + repre_names_by_folder_id[folder_id].add(repre_entity["name"]) repre_names_by_version_id = {} for last_version_id, product_id in 
product_id_by_version_id.items(): - product_doc = product_docs_by_id[product_id] - folder_id = product_doc["parent"] + product_entity = product_entities_by_id[product_id] + folder_id = product_entity["folderId"] repre_names = repre_names_by_folder_id.get(folder_id) if not repre_names: continue repre_names_by_version_id[last_version_id] = repre_names - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, names_by_version_ids=repre_names_by_version_id, - fields=["_id"] + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_current_output_repre_ids_oox(self, selected_repre): project_name = self._project_name - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_names=[selected_repre], - version_ids=self._version_docs_by_id.keys(), - fields=["_id"] + version_ids=self._version_entities_by_id.keys(), + fields={"id"} ) - return [repre_doc["_id"] for repre_doc in repre_docs] + return {repre_entity["id"] for repre_entity in repre_entities} def _get_product_box_values(self): project_name = self._project_name @@ -784,18 +779,18 @@ class SwitchAssetDialog(QtWidgets.QDialog): if selected_folder_id: folder_ids = [selected_folder_id] else: - folder_ids = list(self._folder_docs_by_id.keys()) + folder_ids = list(self._folder_entities_by_id.keys()) - product_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=folder_ids, - fields=["parent", "name"] + folder_ids=folder_ids, + fields={"folderId", "name"} ) product_names_by_parent_id = collections.defaultdict(set) - for product_doc in product_docs: - product_names_by_parent_id[product_doc["parent"]].add( - product_doc["name"] + for product_entity in product_entities: + product_names_by_parent_id[product_entity["folderId"]].add( + product_entity["name"] ) possible_product_names = None @@ -824,15 +819,17 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [ ] [ ] [?] if not selected_folder_id and not selected_product_name: # Find all representations of selection's products - possible_repres = get_representations( + possible_repres = ayon_api.get_representations( project_name, - version_ids=self._version_docs_by_id.keys(), - fields=["parent", "name"] + version_ids=self._version_entities_by_id.keys(), + fields={"versionId", "name"} ) possible_repres_by_parent = collections.defaultdict(set) for repre in possible_repres: - possible_repres_by_parent[repre["parent"]].add(repre["name"]) + possible_repres_by_parent[repre["versionId"]].add( + repre["name"] + ) output_repres = None for repre_names in possible_repres_by_parent.values(): @@ -848,44 +845,44 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [x] [?] 
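The bracketed markers such as "# [x] [x] [?]" above appear to encode which of the three inputs (folder, product, representation) carries a user selection; each `_get_current_output_repre_ids_*` variant handles one combination, and all of them walk the same query chain. A minimal standalone sketch of that chain, assuming an authenticated `ayon_api` session; the helper name is hypothetical, while the calls, parameters and field names are the ones this patch uses:

    import ayon_api

    def last_version_repre_ids(project_name, folder_id, repre_name):
        # Products under the folder; 'fields' is a set of entity field names.
        products = ayon_api.get_products(
            project_name, folder_ids={folder_id}, fields={"id"}
        )
        product_ids = {product["id"] for product in products}
        # Keep the highest regular version per product (hero versions
        # are stored with a negative 'version' number).
        last_by_product_id = {}
        for version in ayon_api.get_versions(
            project_name,
            product_ids=product_ids,
            fields={"id", "version", "productId"},
        ):
            if version["version"] < 0:
                continue
            current = last_by_product_id.get(version["productId"])
            if current is None or current["version"] < version["version"]:
                last_by_product_id[version["productId"]] = version
        version_ids = {v["id"] for v in last_by_product_id.values()}
        repre_entities = ayon_api.get_representations(
            project_name,
            version_ids=version_ids,
            representation_names={repre_name},
            fields={"id"},
        )
        return {repre["id"] for repre in repre_entities}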
if selected_folder_id and selected_product_name: - product_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, selected_product_name, selected_folder_id, - fields=["_id"] + fields={"id"} ) - product_id = product_doc["_id"] + product_id = product_entity["id"] last_versions_by_product_id = self.find_last_versions([product_id]) - version_doc = last_versions_by_product_id.get(product_id) - repre_docs = get_representations( + version_entity = last_versions_by_product_id.get(product_id) + repre_entities = ayon_api.get_representations( project_name, - version_ids=[version_doc["_id"]], - fields=["name"] + version_ids={version_entity["id"]}, + fields={"name"} ) - return [ - repre_doc["name"] - for repre_doc in repre_docs - ] + return { + repre_entity["name"] + for repre_entity in repre_entities + } # [x] [ ] [?] # If only folder is selected if selected_folder_id: # Filter products by names from content product_names = { - product_doc["name"] - for product_doc in self._product_docs_by_id.values() + product_entity["name"] + for product_entity in self._product_entities_by_id.values() } - product_docs = get_subsets( + product_entities = ayon_api.get_products( project_name, - asset_ids=[selected_folder_id], - subset_names=product_names, - fields=["_id"] + folder_ids={selected_folder_id}, + product_names=product_names, + fields={"id"} ) product_ids = { - product_doc["_id"] - for product_doc in product_docs + product_entity["id"] + for product_entity in product_entities } if not product_ids: return list() @@ -895,24 +892,24 @@ class SwitchAssetDialog(QtWidgets.QDialog): for product_id, last_version in ( last_versions_by_product_id.items() ): - version_id = last_version["_id"] + version_id = last_version["id"] product_id_by_version_id[version_id] = product_id if not product_id_by_version_id: return list() - repre_docs = list(get_representations( + repre_entities = list(ayon_api.get_representations( project_name, version_ids=product_id_by_version_id.keys(), - fields=["name", "parent"] + fields={"name", "versionId"} )) - if not repre_docs: + if not repre_entities: return list() repre_names_by_parent = collections.defaultdict(set) - for repre_doc in repre_docs: - repre_names_by_parent[repre_doc["parent"]].add( - repre_doc["name"] + for repre_entity in repre_entities: + repre_names_by_parent[repre_entity["versionId"]].add( + repre_entity["name"] ) available_repres = None @@ -926,46 +923,46 @@ class SwitchAssetDialog(QtWidgets.QDialog): return list(available_repres) # [ ] [x] [?] 
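Worth noting before the next branch: the migration consistently swaps Mongo-style projections for AYON server field sets, and Mongo key names for entity link fields ("_id" becomes "id", the generic "parent" becomes "versionId", "productId" or "folderId" depending on the entity type). A condensed before/after, where `project_name` and `version_ids` stand in for real values:

    # Before (ayon_core.client, Mongo):
    #     repre_docs = get_representations(
    #         project_name, version_ids=version_ids, fields=["_id"])
    #     repre_ids = [repre_doc["_id"] for repre_doc in repre_docs]
    # After (ayon_api, AYON server):
    repre_ids = {
        repre_entity["id"]
        for repre_entity in ayon_api.get_representations(
            project_name, version_ids=version_ids, fields={"id"}
        )
    }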
- product_docs = list(get_subsets( + product_entities = list(ayon_api.get_products( project_name, - asset_ids=self._folder_docs_by_id.keys(), - subset_names=[selected_product_name], - fields=["_id", "parent"] + folder_ids=self._folder_entities_by_id.keys(), + product_names=[selected_product_name], + fields={"id", "folderId"} )) - if not product_docs: + if not product_entities: return list() - product_docs_by_id = { - product_doc["_id"]: product_doc - for product_doc in product_docs + product_entities_by_id = { + product_entity["id"]: product_entity + for product_entity in product_entities } last_versions_by_product_id = self.find_last_versions( - product_docs_by_id.keys() + product_entities_by_id.keys() ) product_id_by_version_id = {} for product_id, last_version in last_versions_by_product_id.items(): - version_id = last_version["_id"] + version_id = last_version["id"] product_id_by_version_id[version_id] = product_id if not product_id_by_version_id: return list() - repre_docs = list( - get_representations( + repre_entities = list( + ayon_api.get_representations( project_name, version_ids=product_id_by_version_id.keys(), - fields=["name", "parent"] + fields={"name", "versionId"} ) ) - if not repre_docs: + if not repre_entities: return list() repre_names_by_folder_id = collections.defaultdict(set) - for repre_doc in repre_docs: - product_id = product_id_by_version_id[repre_doc["parent"]] - folder_id = product_docs_by_id[product_id]["parent"] - repre_names_by_folder_id[folder_id].add(repre_doc["name"]) + for repre_entity in repre_entities: + product_id = product_id_by_version_id[repre_entity["versionId"]] + folder_id = product_entities_by_id[product_id]["folderId"] + repre_names_by_folder_id[folder_id].add(repre_entity["name"]) available_repres = None for repre_names in repre_names_by_folder_id.values(): @@ -981,7 +978,7 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_folder_id = self._folders_field.get_selected_folder_id() if ( selected_folder_id is None - and (self._missing_docs or self._inactive_folder_ids) + and (self._missing_entities or self._inactive_folder_ids) ): validation_state.folder_ok = False @@ -1003,17 +1000,17 @@ class SwitchAssetDialog(QtWidgets.QDialog): # [x] [ ] [?] 
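Several helpers above query representations through `names_by_version_ids`. The expected shape of that argument, judging by how the surrounding code builds it, is a mapping of version id to an iterable of representation names, which lets a single query apply a different name filter per version (the ids below are made up):

    repre_names_by_version_id = {
        "9f1c...": {"ma", "abc"},
        "b27e...": {"exr"},
    }
    repre_entities = ayon_api.get_representations(
        project_name,
        names_by_version_ids=repre_names_by_version_id,
        fields={"id"},
    )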
project_name = self._project_name - product_docs = get_subsets( - project_name, asset_ids=[selected_folder_id], fields=["name"] + product_entities = ayon_api.get_products( + project_name, folder_ids=[selected_folder_id], fields={"name"} ) product_names = set( - product_doc["name"] - for product_doc in product_docs + product_entity["name"] + for product_entity in product_entities ) - for product_doc in self._product_docs_by_id.values(): - if product_doc["name"] not in product_names: + for product_entity in self._product_entities_by_id.values(): + if product_entity["name"] not in product_names: validation_state.product_ok = False break @@ -1043,49 +1040,49 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_folder_id is not None and selected_product_name is not None ): - product_doc = get_subset_by_name( + product_entity = ayon_api.get_product_by_name( project_name, selected_product_name, selected_folder_id, - fields=["_id"] + fields={"id"} ) - product_id = product_doc["_id"] + product_id = product_entity["id"] last_versions_by_product_id = self.find_last_versions([product_id]) last_version = last_versions_by_product_id.get(product_id) if not last_version: validation_state.repre_ok = False return - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, - version_ids=[last_version["_id"]], - fields=["name"] + version_ids={last_version["id"]}, + fields={"name"} ) repre_names = set( - repre_doc["name"] - for repre_doc in repre_docs + repre_entity["name"] + for repre_entity in repre_entities ) - for repre_doc in self._repre_docs_by_id.values(): - if repre_doc["name"] not in repre_names: + for repre_entity in self._repre_entities_by_id.values(): + if repre_entity["name"] not in repre_names: validation_state.repre_ok = False break return # [x] [ ] [ ] if selected_folder_id is not None: - product_docs = list(get_subsets( + product_entities = list(ayon_api.get_products( project_name, - asset_ids=[selected_folder_id], - fields=["_id", "name"] + folder_ids={selected_folder_id}, + fields={"id", "name"} )) product_name_by_id = {} product_ids = set() - for product_doc in product_docs: - product_id = product_doc["_id"] + for product_entity in product_entities: + product_id = product_entity["id"] product_ids.add(product_id) - product_name_by_id[product_id] = product_doc["name"] + product_name_by_id[product_id] = product_entity["name"] last_versions_by_product_id = self.find_last_versions(product_ids) @@ -1093,66 +1090,71 @@ class SwitchAssetDialog(QtWidgets.QDialog): for product_id, last_version in ( last_versions_by_product_id.items() ): - version_id = last_version["_id"] + version_id = last_version["id"] product_id_by_version_id[version_id] = product_id - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=product_id_by_version_id.keys(), - fields=["name", "parent"] + fields={"name", "versionId"} ) repres_by_product_name = collections.defaultdict(set) - for repre_doc in repre_docs: - product_id = product_id_by_version_id[repre_doc["parent"]] + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + product_id = product_id_by_version_id[version_id] product_name = product_name_by_id[product_id] - repres_by_product_name[product_name].add(repre_doc["name"]) + repres_by_product_name[product_name].add(repre_entity["name"]) - for repre_doc in self._repre_docs_by_id.values(): - version_doc = self._version_docs_by_id[repre_doc["parent"]] - product_doc = 
self._product_docs_by_id[version_doc["parent"]] - repre_names = repres_by_product_name[product_doc["name"]] - if repre_doc["name"] not in repre_names: + for repre_entity in self._repre_entities_by_id.values(): + version_id = repre_entity["versionId"] + version_entity = self._version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = self._product_entities_by_id[product_id] + repre_names = repres_by_product_name[product_entity["name"]] + if repre_entity["name"] not in repre_names: validation_state.repre_ok = False break return # [ ] [x] [ ] - # Product documents - product_docs = get_subsets( + # Product entities + product_entities = ayon_api.get_products( project_name, - asset_ids=self._folder_docs_by_id.keys(), - subset_names=[selected_product_name], - fields=["_id", "name", "parent"] + folder_ids=self._folder_entities_by_id.keys(), + product_names={selected_product_name}, + fields={"id", "name", "folderId"} ) - product_docs_by_id = {} - for product_doc in product_docs: - product_docs_by_id[product_doc["_id"]] = product_doc + product_entities_by_id = {} + for product_entity in product_entities: + product_entities_by_id[product_entity["id"]] = product_entity last_versions_by_product_id = self.find_last_versions( - product_docs_by_id.keys() + product_entities_by_id.keys() ) product_id_by_version_id = {} for product_id, last_version in last_versions_by_product_id.items(): - version_id = last_version["_id"] + version_id = last_version["id"] product_id_by_version_id[version_id] = product_id - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=product_id_by_version_id.keys(), - fields=["name", "parent"] + fields={"name", "versionId"} ) repres_by_folder_id = collections.defaultdict(set) - for repre_doc in repre_docs: - product_id = product_id_by_version_id[repre_doc["parent"]] - folder_id = product_docs_by_id[product_id]["parent"] - repres_by_folder_id[folder_id].add(repre_doc["name"]) + for repre_entity in repre_entities: + product_id = product_id_by_version_id[repre_entity["versionId"]] + folder_id = product_entities_by_id[product_id]["folderId"] + repres_by_folder_id[folder_id].add(repre_entity["name"]) - for repre_doc in self._repre_docs_by_id.values(): - version_doc = self._version_docs_by_id[repre_doc["parent"]] - product_doc = self._product_docs_by_id[version_doc["parent"]] - folder_id = product_doc["parent"] + for repre_entity in self._repre_entities_by_id.values(): + version_id = repre_entity["versionId"] + version_entity = self._version_entities_by_id[version_id] + product_id = version_entity["productId"] + product_entity = self._product_entities_by_id[product_id] + folder_id = product_entity["folderId"] repre_names = repres_by_folder_id[folder_id] - if repre_doc["name"] not in repre_names: + if repre_entity["name"] not in repre_names: validation_state.repre_ok = False break @@ -1182,57 +1184,59 @@ class SwitchAssetDialog(QtWidgets.QDialog): if selected_folder_id: folder_ids = {selected_folder_id} else: - folder_ids = set(self._folder_docs_by_id.keys()) + folder_ids = set(self._folder_entities_by_id.keys()) product_names = None if selected_product_name: product_names = [selected_product_name] - product_docs = list(get_subsets( + product_entities = list(ayon_api.get_products( project_name, - subset_names=product_names, - asset_ids=folder_ids + product_names=product_names, + folder_ids=folder_ids )) product_ids = set() - product_docs_by_parent_and_name = collections.defaultdict(dict) - for 
product_doc in product_docs: - product_ids.add(product_doc["_id"]) - folder_id = product_doc["parent"] - name = product_doc["name"] - product_docs_by_parent_and_name[folder_id][name] = product_doc + product_entities_by_parent_and_name = collections.defaultdict(dict) + for product_entity in product_entities: + product_ids.add(product_entity["id"]) + folder_id = product_entity["folderId"] + name = product_entity["name"] + product_entities_by_parent_and_name[folder_id][name] = ( + product_entity + ) # versions - _version_docs = get_versions(project_name, subset_ids=product_ids) - version_docs = list(reversed( - sorted(_version_docs, key=lambda item: item["name"]) - )) - - hero_version_docs = list(get_hero_versions( - project_name, subset_ids=product_ids + _version_entities = ayon_api.get_versions( + project_name, product_ids=product_ids + ) + version_entities = list(reversed( + sorted(_version_entities, key=lambda item: item["version"]) )) version_ids = set() - version_docs_by_parent_id_and_name = collections.defaultdict(dict) - for version_doc in version_docs: - version_ids.add(version_doc["_id"]) - product_id = version_doc["parent"] - name = version_doc["name"] - version_docs_by_parent_id_and_name[product_id][name] = version_doc + version_entities_by_product_id = collections.defaultdict(dict) + hero_version_entities_by_product_id = {} + for version_entity in version_entities: + version_ids.add(version_entity["id"]) + product_id = version_entity["productId"] + version = version_entity["version"] + if version < 0: + hero_version_entities_by_product_id[product_id] = ( + version_entity + ) + continue + version_entities_by_product_id[product_id][version] = ( + version_entity + ) - hero_version_docs_by_parent_id = {} - for hero_version_doc in hero_version_docs: - version_ids.add(hero_version_doc["_id"]) - parent_id = hero_version_doc["parent"] - hero_version_docs_by_parent_id[parent_id] = hero_version_doc - - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, version_ids=version_ids ) - repre_docs_by_parent_id_by_name = collections.defaultdict(dict) - for repre_doc in repre_docs: - parent_id = repre_doc["parent"] - name = repre_doc["name"] - repre_docs_by_parent_id_by_name[parent_id][name] = repre_doc + repre_entities_by_name_version_id = collections.defaultdict(dict) + for repre_entity in repre_entities: + version_id = repre_entity["versionId"] + name = repre_entity["name"] + repre_entities_by_name_version_id[version_id][name] = repre_entity for container in self._items: self._switch_container( @@ -1241,10 +1245,10 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_folder_id, selected_product_name, selected_representation, - product_docs_by_parent_and_name, - version_docs_by_parent_id_and_name, - hero_version_docs_by_parent_id, - repre_docs_by_parent_id_by_name, + product_entities_by_parent_and_name, + version_entities_by_product_id, + hero_version_entities_by_product_id, + repre_entities_by_name_version_id, ) self.switched.emit() @@ -1258,81 +1262,81 @@ class SwitchAssetDialog(QtWidgets.QDialog): selected_folder_id, selected_product_name, selected_representation, - product_docs_by_parent_and_name, - version_docs_by_parent_id_and_name, - hero_version_docs_by_parent_id, - repre_docs_by_parent_id_by_name, + product_entities_by_parent_and_name, + version_entities_by_product_id, + hero_version_entities_by_product_id, + repre_entities_by_name_version_id, ): container_repre_id = container["representation"] - container_repre = 
self._repre_docs_by_id[container_repre_id] + container_repre = self._repre_entities_by_id[container_repre_id] container_repre_name = container_repre["name"] - container_version_id = container_repre["parent"] + container_version_id = container_repre["versionId"] - container_version = self._version_docs_by_id[container_version_id] + container_version = self._version_entities_by_id[container_version_id] - container_product_id = container_version["parent"] - container_product = self._product_docs_by_id[container_product_id] + container_product_id = container_version["productId"] + container_product = self._product_entities_by_id[container_product_id] container_product_name = container_product["name"] - container_folder_id = container_product["parent"] + container_folder_id = container_product["folderId"] if selected_folder_id: folder_id = selected_folder_id else: folder_id = container_folder_id - products_by_name = product_docs_by_parent_and_name[folder_id] + products_by_name = product_entities_by_parent_and_name[folder_id] if selected_product_name: - product_doc = products_by_name[selected_product_name] + product_entity = products_by_name[selected_product_name] else: - product_doc = products_by_name[container_product["name"]] + product_entity = products_by_name[container_product["name"]] - repre_doc = None - product_id = product_doc["_id"] - if container_version["type"] == "hero_version": - hero_version = hero_version_docs_by_parent_id.get( + repre_entity = None + product_id = product_entity["id"] + if container_version["version"] < 0: + hero_version = hero_version_entities_by_product_id.get( product_id ) if hero_version: - _repres = repre_docs_by_parent_id_by_name.get( - hero_version["_id"] + _repres = repre_entities_by_name_version_id.get( + hero_version["id"] ) if selected_representation: - repre_doc = _repres.get(selected_representation) + repre_entity = _repres.get(selected_representation) else: - repre_doc = _repres.get(container_repre_name) + repre_entity = _repres.get(container_repre_name) - if not repre_doc: - version_docs_by_name = ( - version_docs_by_parent_id_and_name[product_id] + if not repre_entity: + version_entities_by_version = ( + version_entities_by_product_id[product_id] ) - # If asset or subset are selected for switching, we use latest + # If folder or product are selected for switching, we use latest # version else we try to keep the current container version. 
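The `container_version["version"] < 0` check above is the convention this patch relies on throughout: AYON stores a hero version as a negative `version` number whose absolute value points at the regular version it mirrors, whereas the removed Mongo code distinguished documents by `"type": "hero_version"`. A small illustration with a hypothetical helper:

    def split_hero_versions(version_entities):
        # Hero versions have version < 0; abs(version) is the source version.
        hero_entities = []
        standard_entities = []
        for entity in version_entities:
            if entity["version"] < 0:
                hero_entities.append(entity)
            else:
                standard_entities.append(entity)
        return hero_entities, standard_entities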
- version_name = None + version = None if ( selected_folder_id in (None, container_folder_id) and selected_product_name in (None, container_product_name) ): - version_name = container_version.get("name") + version = container_version.get("version") - version_doc = None - if version_name is not None: - version_doc = version_docs_by_name.get(version_name) + version_entity = None + if version is not None: + version_entity = version_entities_by_version.get(version) - if version_doc is None: - version_name = max(version_docs_by_name) - version_doc = version_docs_by_name[version_name] + if version_entity is None: + version_name = max(version_entities_by_version) + version_entity = version_entities_by_version[version_name] - version_id = version_doc["_id"] - repres_by_name = repre_docs_by_parent_id_by_name[version_id] + version_id = version_entity["id"] + repres_by_name = repre_entities_by_name_version_id[version_id] if selected_representation: - repre_doc = repres_by_name[selected_representation] + repre_entity = repres_by_name[selected_representation] else: - repre_doc = repres_by_name[container_repre_name] + repre_entity = repres_by_name[container_repre_name] error = None try: - switch_container(container, repre_doc, loader) + switch_container(container, repre_entity, loader) except ( LoaderSwitchNotImplementedError, IncompatibleLoaderError, diff --git a/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py b/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py index e46c28474f..3137e70214 100644 --- a/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py +++ b/client/ayon_core/tools/sceneinventory/switch_dialog/folders_input.py @@ -5,8 +5,8 @@ from ayon_core.tools.utils import ( PlaceholderLineEdit, BaseClickableFrame, set_style_property, + FoldersWidget, ) -from ayon_core.tools.ayon_utils.widgets import FoldersWidget NOT_SET = object() diff --git a/client/ayon_core/tools/sceneinventory/view.py b/client/ayon_core/tools/sceneinventory/view.py index 80c89338f5..5cbd4daf70 100644 --- a/client/ayon_core/tools/sceneinventory/view.py +++ b/client/ayon_core/tools/sceneinventory/view.py @@ -4,16 +4,10 @@ import logging import itertools from functools import partial +import ayon_api from qtpy import QtWidgets, QtCore import qtawesome -from ayon_core.client import ( - get_version_by_id, - get_versions, - get_hero_versions, - get_representation_by_id, - get_representations, -) from ayon_core import style from ayon_core.pipeline import ( HeroVersionType, @@ -97,50 +91,54 @@ class SceneInventoryView(QtWidgets.QTreeView): pass project_name = self._controller.get_current_project_name() - repre_docs = get_representations( - project_name, representation_ids=repre_ids, fields=["parent"] + repre_entities = ayon_api.get_representations( + project_name, + representation_ids=repre_ids, + fields={"versionId"} ) version_ids = { - repre_doc["parent"] - for repre_doc in repre_docs + repre_entity["versionId"] + for repre_entity in repre_entities } - loaded_versions = get_versions( - project_name, version_ids=version_ids, hero=True + loaded_versions = ayon_api.get_versions( + project_name, version_ids=version_ids ) loaded_hero_versions = [] - versions_by_parent_id = collections.defaultdict(list) + versions_by_product_id = collections.defaultdict(list) product_ids = set() - for version in loaded_versions: - if version["type"] == "hero_version": - loaded_hero_versions.append(version) + for version_entity in loaded_versions: + version = version_entity["version"] + if version < 
0: + loaded_hero_versions.append(version_entity) else: - parent_id = version["parent"] - versions_by_parent_id[parent_id].append(version) - product_ids.add(parent_id) + product_id = version_entity["productId"] + versions_by_product_id[product_id].append(version_entity) + product_ids.add(product_id) - all_versions = get_versions( - project_name, subset_ids=product_ids, hero=True + all_versions = ayon_api.get_versions( + project_name, product_ids=product_ids ) hero_versions = [] - versions = [] - for version in all_versions: - if version["type"] == "hero_version": - hero_versions.append(version) + version_entities = [] + for version_entity in all_versions: + version = version_entity["version"] + if version < 0: + hero_versions.append(version_entity) else: - versions.append(version) + version_entities.append(version_entity) has_loaded_hero_versions = len(loaded_hero_versions) > 0 has_available_hero_version = len(hero_versions) > 0 has_outdated = False - for version in versions: - parent_id = version["parent"] - current_versions = versions_by_parent_id[parent_id] + for version_entity in version_entities: + product_id = version_entity["productId"] + current_versions = versions_by_product_id[product_id] for current_version in current_versions: - if current_version["name"] < version["name"]: + if current_version["version"] < version_entity["version"]: has_outdated = True break @@ -155,46 +153,52 @@ class SceneInventoryView(QtWidgets.QTreeView): for item in items } - repre_docs = get_representations( + repre_entities = ayon_api.get_representations( project_name, representation_ids=repre_ids, - fields=["parent"] + fields={"id", "versionId"} ) - version_ids = set() version_id_by_repre_id = {} - for repre_doc in repre_docs: - version_id = repre_doc["parent"] - repre_id = str(repre_doc["_id"]) + for repre_entity in repre_entities: + repre_id = repre_entity["id"] + version_id = repre_entity["versionId"] version_id_by_repre_id[repre_id] = version_id - version_ids.add(version_id) + version_ids = set(version_id_by_repre_id.values()) - hero_versions = get_hero_versions( + src_version_entity_by_id = { + version_entity["id"]: version_entity + for version_entity in ayon_api.get_versions( + project_name, + version_ids, + fields={"productId", "version"} + ) + } + hero_versions_by_product_id = {} + for version_entity in src_version_entity_by_id.values(): + version = version_entity["version"] + if version < 0: + product_id = version_entity["productId"] + hero_versions_by_product_id[product_id] = abs(version) + + if not hero_versions_by_product_id: + return + + standard_versions = ayon_api.get_versions( project_name, - version_ids=version_ids, - fields=["version_id"] + product_ids=hero_versions_by_product_id.keys(), + versions=hero_versions_by_product_id.values() ) - - hero_src_version_ids = set() - for hero_version in hero_versions: - version_id = hero_version["version_id"] - hero_src_version_ids.add(version_id) - hero_version_id = hero_version["_id"] - for _repre_id, current_version_id in ( - version_id_by_repre_id.items() - ): - if current_version_id == hero_version_id: - version_id_by_repre_id[_repre_id] = version_id - - version_docs = get_versions( - project_name, - version_ids=hero_src_version_ids, - fields=["name"] - ) - version_name_by_id = {} - for version_doc in version_docs: - version_name_by_id[version_doc["_id"]] = \ - version_doc["name"] + standard_version_by_product_id = { + product_id: {} + for product_id in hero_versions_by_product_id.keys() + } + for version_entity in standard_versions: + 
product_id = version_entity["productId"] + version = version_entity["version"] + standard_version_by_product_id[product_id][version] = ( + version_entity + ) # Specify version per item to update to update_items = [] @@ -202,10 +206,20 @@ class SceneInventoryView(QtWidgets.QTreeView): for item in items: repre_id = item["representation"] version_id = version_id_by_repre_id.get(repre_id) - version_name = version_name_by_id.get(version_id) - if version_name is not None: + version_entity = src_version_entity_by_id.get(version_id) + if not version_entity or version_entity["version"] >= 0: + continue + product_id = version_entity["productId"] + version_entities_by_version = ( + standard_version_by_product_id[product_id] + ) + new_version = hero_versions_by_product_id.get(product_id) + new_version_entity = version_entities_by_version.get( + new_version + ) + if new_version_entity is not None: update_items.append(item) - update_versions.append(version_name) + update_versions.append(new_version) self._update_containers(update_items, update_versions) update_icon = qtawesome.icon( @@ -249,8 +263,9 @@ class SceneInventoryView(QtWidgets.QTreeView): menu ) change_to_hero.triggered.connect( - lambda: self._update_containers(items, - version=HeroVersionType(-1)) + lambda: self._update_containers( + items, version=HeroVersionType(-1) + ) ) # set version @@ -296,9 +311,9 @@ class SceneInventoryView(QtWidgets.QTreeView): menu.addAction(remove_action) - self._handle_sync_server(menu, repre_ids) + self._handle_sitesync(menu, repre_ids) - def _handle_sync_server(self, menu, repre_ids): + def _handle_sitesync(self, menu, repre_ids): """Adds actions for download/upload when SyncServer is enabled Args: @@ -309,7 +324,7 @@ class SceneInventoryView(QtWidgets.QTreeView): (OptionMenu) """ - if not self._controller.is_sync_server_enabled(): + if not self._controller.is_sitesync_enabled(): return menu.addSeparator() @@ -608,44 +623,31 @@ class SceneInventoryView(QtWidgets.QTreeView): project_name = self._controller.get_current_project_name() # Get available versions for active representation - repre_doc = get_representation_by_id( + repre_entity = ayon_api.get_representation_by_id( project_name, active["representation"], - fields=["parent"] + fields={"versionId"} ) - repre_version_doc = get_version_by_id( + repre_version_entity = ayon_api.get_version_by_id( project_name, - repre_doc["parent"], - fields=["parent"] + repre_entity["versionId"], + fields={"productId"} ) - version_docs = list(get_versions( + version_entities = list(ayon_api.get_versions( project_name, - subset_ids=[repre_version_doc["parent"]], - hero=True + product_ids={repre_version_entity["productId"]}, )) hero_version = None standard_versions = [] - for version_doc in version_docs: - if version_doc["type"] == "hero_version": - hero_version = version_doc + for version_entity in version_entities: + if version_entity["version"] < 0: + hero_version = version_entity else: - standard_versions.append(version_doc) - versions = list(reversed( - sorted(standard_versions, key=lambda item: item["name"]) - )) - if hero_version: - _version_id = hero_version["version_id"] - for _version in versions: - if _version["_id"] != _version_id: - continue - - hero_version["name"] = HeroVersionType( - _version["name"] - ) - hero_version["data"] = _version["data"] - break + standard_versions.append(version_entity) + standard_versions.sort(key=lambda item: item["version"]) + standard_versions.reverse() # Get index among the listed versions current_item = None @@ -653,15 +655,15 @@ 
class SceneInventoryView(QtWidgets.QTreeView): if isinstance(current_version, HeroVersionType): current_item = hero_version else: - for version in versions: - if version["name"] == current_version: - current_item = version + for version_entity in standard_versions: + if version_entity["version"] == current_version: + current_item = version_entity break all_versions = [] if hero_version: all_versions.append(hero_version) - all_versions.extend(versions) + all_versions.extend(standard_versions) if current_item: index = all_versions.index(current_item) @@ -670,11 +672,10 @@ class SceneInventoryView(QtWidgets.QTreeView): versions_by_label = dict() labels = [] - for version in all_versions: - is_hero = version["type"] == "hero_version" - label = format_version(version["name"], is_hero) + for version_entity in all_versions: + label = format_version(version_entity["version"]) labels.append(label) - versions_by_label[label] = version["name"] + versions_by_label[label] = version_entity["version"] label, state = QtWidgets.QInputDialog.getItem( self, @@ -689,6 +690,8 @@ class SceneInventoryView(QtWidgets.QTreeView): if label: version = versions_by_label[label] + if version < 0: + version = HeroVersionType(version) self._update_containers(items, version) def _show_switch_dialog(self, items): diff --git a/client/ayon_core/tools/sceneinventory/window.py b/client/ayon_core/tools/sceneinventory/window.py index 9584524edd..555db3a17c 100644 --- a/client/ayon_core/tools/sceneinventory/window.py +++ b/client/ayon_core/tools/sceneinventory/window.py @@ -70,7 +70,7 @@ class SceneInventoryWindow(QtWidgets.QDialog): view = SceneInventoryView(controller, self) view.setModel(proxy) - sync_enabled = controller.is_sync_server_enabled() + sync_enabled = controller.is_sitesync_enabled() view.setColumnHidden(model.active_site_col, not sync_enabled) view.setColumnHidden(model.remote_site_col, not sync_enabled) diff --git a/client/ayon_core/tools/texture_copy/app.py b/client/ayon_core/tools/texture_copy/app.py index 120051060b..c288187aac 100644 --- a/client/ayon_core/tools/texture_copy/app.py +++ b/client/ayon_core/tools/texture_copy/app.py @@ -1,12 +1,13 @@ import os import re + import click - import speedcopy +import ayon_api -from ayon_core.client import get_project, get_asset_by_name from ayon_core.lib import Terminal from ayon_core.pipeline import Anatomy +from ayon_core.pipeline.template_data import get_template_data t = Terminal() @@ -24,34 +25,24 @@ class TextureCopy: if os.path.splitext(x)[1].lower() in texture_extensions) return textures - def _get_destination_path(self, asset, project): - project_name = project["name"] - hierarchy = "" - parents = asset['data']['parents'] - if parents and len(parents) > 0: - hierarchy = os.path.join(*parents) + def _get_destination_path(self, folder_entity, project_entity): + project_name = project_entity["name"] product_name = "Main" product_type = "texture" - template_data = { - "project": { - "name": project_name, - "code": project['data']['code'] - }, - "asset": asset["name"], + template_data = get_template_data(project_entity, folder_entity) + template_data.update({ "family": product_type, "subset": product_name, - "folder": { - "name": asset["name"], - }, "product": { "name": product_name, "type": product_type, }, - "hierarchy": hierarchy - } - anatomy = Anatomy(project_name) - template_obj = anatomy.templates_obj["texture"]["path"] + }) + anatomy = Anatomy(project_name, project_entity=project_entity) + template_obj = anatomy.get_template_item( + "publish", "texture", 
"path" + ) return template_obj.format_strict(template_data) def _get_version(self, path): @@ -78,9 +69,9 @@ class TextureCopy: t.echo("!!! {}".format(e)) exit(1) - def process(self, asset_name, project_name, path): + def process(self, project_name, folder_path, path): """ - Process all textures found in path and copy them to asset under + Process all textures found in path and copy them to folder under project. """ @@ -92,20 +83,24 @@ class TextureCopy: else: t.echo(">>> Found {} textures ...".format(len(textures))) - project = get_project(project_name) - if not project: + project_entity = ayon_api.get_project(project_name) + if not project_entity: t.echo("!!! Project name [ {} ] not found.".format(project_name)) exit(1) - asset = get_asset_by_name(project_name, asset_name) - if not asset: - t.echo("!!! Asset [ {} ] not found in project".format(asset_name)) + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if not folder_entity: + t.echo( + "!!! Folder [ {} ] not found in project".format(folder_path) + ) exit(1) - t.echo((">>> Project [ {} ] and " - "asset [ {} ] seems to be OK ...").format(project['name'], - asset['name'])) + t.echo( + ( + ">>> Project [ {} ] and folder [ {} ] seems to be OK ..." + ).format(project_entity['name'], folder_entity['path']) + ) - dst_path = self._get_destination_path(asset, project) + dst_path = self._get_destination_path(folder_entity, project_entity) t.echo("--- Using [ {} ] as destination path".format(dst_path)) if not os.path.exists(dst_path): try: @@ -135,15 +130,15 @@ class TextureCopy: @click.command() -@click.option('--asset', required=True) @click.option('--project', required=True) +@click.option('--folder', required=True) @click.option('--path', required=True) -def texture_copy(asset, project, path): +def texture_copy(project, folder, path): t.echo("*** Running Texture tool ***") t.echo(">>> Initializing avalon session ...") os.environ["AYON_PROJECT_NAME"] = project - os.environ["AYON_FOLDER_PATH"] = asset - TextureCopy().process(asset, project, path) + os.environ["AYON_FOLDER_PATH"] = folder + TextureCopy().process(project, folder, path) if __name__ == '__main__': diff --git a/client/ayon_core/tools/tray/tray.py b/client/ayon_core/tools/tray/tray.py index 3c6c529be8..957518afe4 100644 --- a/client/ayon_core/tools/tray/tray.py +++ b/client/ayon_core/tools/tray/tray.py @@ -552,7 +552,7 @@ class TrayStarter(QtCore.QObject): def main(): app = get_ayon_qt_app() - starter = TrayStarter(app) + starter = TrayStarter(app) # noqa F841 if not is_running_from_build() and os.name == "nt": import ctypes diff --git a/client/ayon_core/tools/traypublisher/window.py b/client/ayon_core/tools/traypublisher/window.py index 210e77f0fa..4700e20531 100644 --- a/client/ayon_core/tools/traypublisher/window.py +++ b/client/ayon_core/tools/traypublisher/window.py @@ -16,9 +16,10 @@ from ayon_core.pipeline import install_host from ayon_core.hosts.traypublisher.api import TrayPublisherHost from ayon_core.tools.publisher.control_qt import QtPublisherController from ayon_core.tools.publisher.window import PublisherWindow -from ayon_core.tools.utils import PlaceholderLineEdit, get_ayon_qt_app -from ayon_core.tools.ayon_utils.models import ProjectsModel -from ayon_core.tools.ayon_utils.widgets import ( +from ayon_core.tools.common_models import ProjectsModel +from ayon_core.tools.utils import ( + PlaceholderLineEdit, + get_ayon_qt_app, ProjectsQtModel, ProjectSortFilterProxy, PROJECT_NAME_ROLE, @@ -41,7 +42,6 @@ class 
TrayPublisherController(QtPublisherController): def reset_hierarchy_cache(self): self._hierarchy_model.reset() - self._asset_docs_cache.reset() def get_project_items(self, sender=None): return self._projects_model.get_project_items(sender) diff --git a/client/ayon_core/tools/utils/__init__.py b/client/ayon_core/tools/utils/__init__.py index 445b4d9b97..4b5fbeaf67 100644 --- a/client/ayon_core/tools/utils/__init__.py +++ b/client/ayon_core/tools/utils/__init__.py @@ -37,6 +37,7 @@ from .lib import ( get_qt_app, get_ayon_qt_app, get_openpype_qt_app, + get_qt_icon, ) from .models import ( @@ -55,6 +56,28 @@ from .dialogs import ( SimplePopup, PopupUpdateKeys, ) +from .projects_widget import ( + ProjectsCombobox, + ProjectsQtModel, + ProjectSortFilterProxy, + PROJECT_NAME_ROLE, + PROJECT_IS_CURRENT_ROLE, + PROJECT_IS_ACTIVE_ROLE, + PROJECT_IS_LIBRARY_ROLE, +) + +from .folders_widget import ( + FoldersWidget, + FoldersQtModel, + FOLDERS_MODEL_SENDER_NAME, + SimpleFoldersWidget, +) + +from .tasks_widget import ( + TasksWidget, + TasksQtModel, + TASKS_MODEL_SENDER_NAME, +) __all__ = ( @@ -96,6 +119,7 @@ __all__ = ( "get_qt_app", "get_ayon_qt_app", "get_openpype_qt_app", + "get_qt_icon", "RecursiveSortFilterProxyModel", @@ -113,4 +137,21 @@ __all__ = ( "ScrollMessageBox", "SimplePopup", "PopupUpdateKeys", + + "ProjectsCombobox", + "ProjectsQtModel", + "ProjectSortFilterProxy", + "PROJECT_NAME_ROLE", + "PROJECT_IS_CURRENT_ROLE", + "PROJECT_IS_ACTIVE_ROLE", + "PROJECT_IS_LIBRARY_ROLE", + + "FoldersWidget", + "FoldersQtModel", + "FOLDERS_MODEL_SENDER_NAME", + "SimpleFoldersWidget", + + "TasksWidget", + "TasksQtModel", + "TASKS_MODEL_SENDER_NAME", ) diff --git a/client/ayon_core/tools/utils/assets_widget.py b/client/ayon_core/tools/utils/assets_widget.py deleted file mode 100644 index 7c3fd8d97c..0000000000 --- a/client/ayon_core/tools/utils/assets_widget.py +++ /dev/null @@ -1,696 +0,0 @@ -import time -import collections - -from qtpy import QtWidgets, QtCore, QtGui -import qtawesome - -from ayon_core.client import ( - get_project, - get_assets, -) -from ayon_core.style import ( - get_default_tools_icon_color, - get_default_entity_icon_color, -) -from ayon_core.tools.flickcharm import FlickCharm - -from .views import ( - TreeViewSpinner, - DeselectableTreeView -) -from .widgets import PlaceholderLineEdit -from .models import RecursiveSortFilterProxyModel -from .lib import ( - DynamicQThread, - get_qta_icon_by_name_and_color -) - -ASSET_ID_ROLE = QtCore.Qt.UserRole + 1 -ASSET_NAME_ROLE = QtCore.Qt.UserRole + 2 -ASSET_LABEL_ROLE = QtCore.Qt.UserRole + 3 -ASSET_UNDERLINE_COLORS_ROLE = QtCore.Qt.UserRole + 4 -ASSET_PATH_ROLE = QtCore.Qt.UserRole + 5 - - -def _get_default_asset_icon_name(has_children): - if has_children: - return "fa.folder" - return "fa.folder-o" - - -def _get_asset_icon_color_from_doc(asset_doc): - if asset_doc: - return asset_doc["data"].get("color") - return None - - -def _get_asset_icon_name_from_doc(asset_doc): - if asset_doc: - return asset_doc["data"].get("icon") - return None - - -def _get_asset_icon_color(asset_doc): - icon_color = _get_asset_icon_color_from_doc(asset_doc) - if icon_color: - return icon_color - return get_default_entity_icon_color() - - -def _get_asset_icon_name(asset_doc, has_children=True): - icon_name = _get_asset_icon_name_from_doc(asset_doc) - if icon_name: - return icon_name - return _get_default_asset_icon_name(has_children) - - -def get_asset_icon(asset_doc, has_children=False): - """Get asset icon. 
- - Deprecated: - This function will be removed in future releases. Use on your own - risk. - - Args: - asset_doc (dict): Asset document. - has_children (Optional[bool]): Asset has children assets. - - Returns: - QIcon: Asset icon. - - """ - icon_name = _get_asset_icon_name(asset_doc, has_children) - icon_color = _get_asset_icon_color(asset_doc) - - return get_qta_icon_by_name_and_color(icon_name, icon_color) - - -class _AssetsView(TreeViewSpinner, DeselectableTreeView): - """Asset items view. - - Adds abilities to deselect, show loading spinner and add flick charm - (scroll by mouse/touchpad click and move). - """ - - def __init__(self, parent=None): - super(_AssetsView, self).__init__(parent) - self.setIndentation(15) - self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.setHeaderHidden(True) - - self._flick_charm_activated = False - self._flick_charm = FlickCharm(parent=self) - self._before_flick_scroll_mode = None - - def activate_flick_charm(self): - if self._flick_charm_activated: - return - self._flick_charm_activated = True - self._before_flick_scroll_mode = self.verticalScrollMode() - self._flick_charm.activateOn(self) - self.setVerticalScrollMode(QtWidgets.QAbstractItemView.ScrollPerPixel) - - def deactivate_flick_charm(self): - if not self._flick_charm_activated: - return - self._flick_charm_activated = False - self._flick_charm.deactivateFrom(self) - if self._before_flick_scroll_mode is not None: - self.setVerticalScrollMode(self._before_flick_scroll_mode) - - def mousePressEvent(self, event): - index = self.indexAt(event.pos()) - if not index.isValid(): - modifiers = QtWidgets.QApplication.keyboardModifiers() - if modifiers == QtCore.Qt.ShiftModifier: - return - elif modifiers == QtCore.Qt.ControlModifier: - return - - super(_AssetsView, self).mousePressEvent(event) - - def set_loading_state(self, loading, empty): - """Change loading state. - - TODO: Separate into 2 individual methods. - - Args: - loading(bool): Is loading. - empty(bool): Is model empty. - """ - if self.is_loading != loading: - if loading: - self.spinner.repaintNeeded.connect( - self.viewport().update - ) - else: - self.spinner.repaintNeeded.disconnect() - self.viewport().update() - - self.is_loading = loading - self.is_empty = empty - - -class _AssetModel(QtGui.QStandardItemModel): - """A model listing assets in the active project. - - The assets are displayed in a treeview, they are visually parented by - a `visualParent` field in the database containing an `_id` to a parent - asset. - - Asset document may have defined label, icon or icon color. - - Loading of data for model happens in thread which means that refresh - is not sequential. When refresh is triggered it is required to listen for - 'refreshed' signal. - - Args: - parent (QObject): Parent Qt object. 
- """ - - _doc_fetched = QtCore.Signal() - refreshed = QtCore.Signal(bool) - - # Asset document projection - _asset_projection = { - "name": 1, - "parent": 1, - "data.visualParent": 1, - "data.label": 1, - "data.icon": 1, - "data.color": 1 - } - - def __init__(self, parent=None): - super(_AssetModel, self).__init__(parent=parent) - - self._refreshing = False - self._doc_fetching_thread = None - self._doc_fetching_stop = False - self._doc_payload = [] - - self._doc_fetched.connect(self._on_docs_fetched) - - self._item_ids_with_color = set() - self._items_by_asset_id = {} - - self._project_name = None - self._last_project_name = None - - @property - def refreshing(self): - return self._refreshing - - def get_index_by_asset_id(self, asset_id): - item = self._items_by_asset_id.get(asset_id) - if item is not None: - return item.index() - return QtCore.QModelIndex() - - def get_indexes_by_asset_ids(self, asset_ids): - return [ - self.get_index_by_asset_id(asset_id) - for asset_id in asset_ids - ] - - def get_index_by_asset_name(self, asset_name): - indexes = self.get_indexes_by_asset_names([asset_name]) - for index in indexes: - if index.isValid(): - return index - return indexes[0] - - def get_indexes_by_asset_names(self, asset_names): - asset_ids_by_name = { - asset_name: None - for asset_name in asset_names - } - - for asset_id, item in self._items_by_asset_id.items(): - asset_name = item.data(ASSET_NAME_ROLE) - if asset_name in asset_ids_by_name: - asset_ids_by_name[asset_name] = asset_id - - asset_ids = [ - asset_ids_by_name[asset_name] - for asset_name in asset_names - ] - - return self.get_indexes_by_asset_ids(asset_ids) - - def get_project_name(self): - return self._project_name - - def set_project_name(self, project_name, refresh): - if self._project_name == project_name: - return - self._project_name = project_name - if refresh: - self.refresh() - - def refresh(self, force=False): - """Refresh the data for the model. - - Args: - force (bool): Stop currently running refresh start new refresh. 
- """ - # Skip fetch if there is already other thread fetching documents - if self._refreshing: - if not force: - return - self.stop_refresh() - - project_name = self._project_name - clear_model = False - if project_name != self._last_project_name: - clear_model = True - self._last_project_name = project_name - - if clear_model: - self._clear_items() - - # Fetch documents from mongo - # Restart payload - self._refreshing = True - self._doc_payload = [] - self._doc_fetching_thread = DynamicQThread(self._threaded_fetch) - self._doc_fetching_thread.start() - - def stop_refresh(self): - self._stop_fetch_thread() - - def _clear_items(self): - root_item = self.invisibleRootItem() - root_item.removeRows(0, root_item.rowCount()) - self._items_by_asset_id = {} - self._item_ids_with_color = set() - - def _on_docs_fetched(self): - # Make sure refreshing did not change - # - since this line is refreshing sequential and - # triggering of new refresh will happen when this method is done - if not self._refreshing: - self._clear_items() - return - - self._fill_assets(self._doc_payload) - - self.refreshed.emit(bool(self._items_by_asset_id)) - - self._stop_fetch_thread() - - def _fill_assets(self, asset_docs): - # Collect asset documents as needed - asset_ids = set() - asset_docs_by_id = {} - asset_ids_by_parents = collections.defaultdict(set) - for asset_doc in asset_docs: - asset_id = asset_doc["_id"] - asset_data = asset_doc.get("data") or {} - parent_id = asset_data.get("visualParent") - asset_ids.add(asset_id) - asset_docs_by_id[asset_id] = asset_doc - asset_ids_by_parents[parent_id].add(asset_id) - - # Prepare removed asset ids - removed_asset_ids = ( - set(self._items_by_asset_id.keys()) - set(asset_docs_by_id.keys()) - ) - - # Prepare queue for adding new items - asset_items_queue = collections.deque() - - # Queue starts with root item and 'visualParent' None - root_item = self.invisibleRootItem() - asset_items_queue.append((None, root_item)) - - while asset_items_queue: - # Get item from queue - parent_id, parent_item = asset_items_queue.popleft() - # Skip if there are no children - children_ids = asset_ids_by_parents[parent_id] - - # Go through current children of parent item - # - find out items that were deleted and skip creation of already - # existing items - for row in reversed(range(parent_item.rowCount())): - child_item = parent_item.child(row, 0) - asset_id = child_item.data(ASSET_ID_ROLE) - # Remove item that is not available - if asset_id not in children_ids: - if asset_id in removed_asset_ids: - # Remove and destroy row - parent_item.removeRow(row) - else: - # Just take the row from parent without destroying - parent_item.takeRow(row) - continue - - # Remove asset id from `children_ids` set - # - is used as set for creation of "new items" - children_ids.remove(asset_id) - # Add existing children to queue - asset_items_queue.append((asset_id, child_item)) - - new_items = [] - for asset_id in children_ids: - # Look for item in cache (maybe parent changed) - item = self._items_by_asset_id.get(asset_id) - # Create new item if was not found - if item is None: - item = QtGui.QStandardItem() - item.setEditable(False) - item.setData(asset_id, ASSET_ID_ROLE) - self._items_by_asset_id[asset_id] = item - new_items.append(item) - # Add item to queue - asset_items_queue.append((asset_id, item)) - - if new_items: - parent_item.appendRows(new_items) - - # Remove cache of removed items - for asset_id in removed_asset_ids: - self._items_by_asset_id.pop(asset_id) - - # Refresh data - # - all items 
refresh all data except id - for asset_id, item in self._items_by_asset_id.items(): - asset_doc = asset_docs_by_id[asset_id] - - asset_name = asset_doc["name"] - if item.data(ASSET_NAME_ROLE) != asset_name: - item.setData(asset_name, ASSET_NAME_ROLE) - - asset_data = asset_doc.get("data") or {} - asset_label = asset_data.get("label") or asset_name - if item.data(ASSET_LABEL_ROLE) != asset_label: - item.setData(asset_label, QtCore.Qt.DisplayRole) - item.setData(asset_label, ASSET_LABEL_ROLE) - - has_children = item.rowCount() > 0 - icon = get_asset_icon(asset_doc, has_children) - item.setData(icon, QtCore.Qt.DecorationRole) - - def _threaded_fetch(self): - asset_docs = self._fetch_asset_docs() - if not self._refreshing: - return - - self._doc_payload = asset_docs - - # Emit doc fetched only if was not stopped - self._doc_fetched.emit() - - def _fetch_asset_docs(self): - project_name = self.get_project_name() - if not project_name: - return [] - - project_doc = get_project(project_name, fields=["_id"]) - if not project_doc: - return [] - - # Get all assets sorted by name - return list( - get_assets(project_name, fields=self._asset_projection.keys()) - ) - - def _stop_fetch_thread(self): - self._refreshing = False - if self._doc_fetching_thread is not None: - while self._doc_fetching_thread.isRunning(): - time.sleep(0.01) - self._doc_fetching_thread = None - - -class _AssetsWidget(QtWidgets.QWidget): - """Base widget to display a tree of assets with filter. - - Assets have only one column and are sorted by name. - - Refreshing of assets happens in thread so calling 'refresh' method - is not sequential. To capture moment when refreshing is finished listen - to 'refreshed' signal. - - To capture selection changes listen to 'selection_changed' signal. It won't - send any information about new selection as it may be different based on - inheritance changes. - - Args: - parent (QWidget): Parent Qt widget. 
- """ - - # on model refresh - refresh_triggered = QtCore.Signal() - refreshed = QtCore.Signal() - # on view selection change - selection_changed = QtCore.Signal() - # It was double clicked on view - double_clicked = QtCore.Signal() - - def __init__(self, parent=None): - super(_AssetsWidget, self).__init__(parent=parent) - - # Tree View - model = self._create_source_model() - proxy = self._create_proxy_model(model) - - view = _AssetsView(self) - view.setModel(proxy) - - header_widget = QtWidgets.QWidget(self) - - current_asset_icon = qtawesome.icon( - "fa.arrow-down", color=get_default_tools_icon_color() - ) - current_asset_btn = QtWidgets.QPushButton(header_widget) - current_asset_btn.setIcon(current_asset_icon) - current_asset_btn.setToolTip("Go to Asset from current Session") - # Hide by default - current_asset_btn.setVisible(False) - - refresh_icon = qtawesome.icon( - "fa.refresh", color=get_default_tools_icon_color() - ) - refresh_btn = QtWidgets.QPushButton(header_widget) - refresh_btn.setIcon(refresh_icon) - refresh_btn.setToolTip("Refresh items") - - filter_input = PlaceholderLineEdit(header_widget) - filter_input.setPlaceholderText("Filter folders..") - - # Header - header_layout = QtWidgets.QHBoxLayout(header_widget) - header_layout.setContentsMargins(0, 0, 0, 0) - header_layout.addWidget(filter_input) - header_layout.addWidget(current_asset_btn) - header_layout.addWidget(refresh_btn) - - # Make header widgets expand vertically if there is a place - for widget in ( - current_asset_btn, - refresh_btn, - filter_input, - ): - size_policy = widget.sizePolicy() - size_policy.setVerticalPolicy( - QtWidgets.QSizePolicy.MinimumExpanding) - widget.setSizePolicy(size_policy) - - # Layout - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addWidget(header_widget, 0) - layout.addWidget(view, 1) - - # Signals/Slots - filter_input.textChanged.connect(self._on_filter_text_change) - - selection_model = view.selectionModel() - selection_model.selectionChanged.connect(self._on_selection_change) - refresh_btn.clicked.connect(self.refresh) - current_asset_btn.clicked.connect(self._on_current_asset_click) - view.doubleClicked.connect(self.double_clicked) - - self._header_widget = header_widget - self._filter_input = filter_input - self._refresh_btn = refresh_btn - self._current_asset_btn = current_asset_btn - self._model = model - self._proxy = proxy - self._view = view - - self._last_btns_height = None - - self._current_asset_name = None - - self.model_selection = {} - - @property - def header_widget(self): - return self._header_widget - - def get_project_name(self): - self._model.get_project_name() - - def set_project_name(self, project_name, refresh=True): - self._model.set_project_name(project_name, refresh) - - def set_current_asset_name(self, asset_name): - self._current_asset_name = asset_name - - def _create_source_model(self): - model = _AssetModel(parent=self) - model.refreshed.connect(self._on_model_refresh) - return model - - def _create_proxy_model(self, source_model): - proxy = RecursiveSortFilterProxyModel() - proxy.setSourceModel(source_model) - proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) - proxy.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive) - return proxy - - @property - def refreshing(self): - return self._model.refreshing - - def refresh(self): - self._refresh_model() - - def stop_refresh(self): - self._model.stop_refresh() - - def _get_current_asset_name(self): - return self._current_asset_name - - def 
_on_current_asset_click(self): - """Trigger change of asset to current context asset. - This separation gives ability to override this method and use it - in differnt way. - """ - - self.select_current_asset() - - def select_current_asset(self): - asset_name = self._get_current_asset_name() - if asset_name: - self.select_asset_by_name(asset_name) - - def set_refresh_btn_visibility(self, visible=None): - """Hide set refresh button. - Some tools may have their global refresh button or do not support - refresh at all. - """ - - if visible is None: - visible = not self._refresh_btn.isVisible() - self._refresh_btn.setVisible(visible) - - def set_current_asset_btn_visibility(self, visible=None): - """Hide set current asset button. - - Not all tools support using of current context asset. - """ - - if visible is None: - visible = not self._current_asset_btn.isVisible() - self._current_asset_btn.setVisible(visible) - - def select_asset(self, asset_id): - index = self._model.get_index_by_asset_id(asset_id) - new_index = self._proxy.mapFromSource(index) - self._select_indexes([new_index]) - - def select_asset_by_name(self, asset_name): - index = self._model.get_index_by_asset_name(asset_name) - new_index = self._proxy.mapFromSource(index) - self._select_indexes([new_index]) - - def activate_flick_charm(self): - self._view.activate_flick_charm() - - def deactivate_flick_charm(self): - self._view.deactivate_flick_charm() - - def _on_selection_change(self): - self.selection_changed.emit() - - def _on_filter_text_change(self, new_text): - self._proxy.setFilterFixedString(new_text) - - def _on_model_refresh(self, has_item): - """This method should be triggered on model refresh. - - Default implementation register this callback in '_create_source_model' - so if you're modifying model keep in mind that this method should be - called when refresh is done. - """ - - self._proxy.sort(0) - self._set_loading_state(loading=False, empty=not has_item) - self.refreshed.emit() - - def _refresh_model(self): - # Store selection - self._set_loading_state(loading=True, empty=True) - - # Trigger signal before refresh is called - self.refresh_triggered.emit() - # Refresh model - self._model.refresh() - - def _set_loading_state(self, loading, empty): - self._view.set_loading_state(loading, empty) - - def _clear_selection(self): - selection_model = self._view.selectionModel() - selection_model.clearSelection() - - def _select_indexes(self, indexes): - valid_indexes = [ - index - for index in indexes - if index.isValid() - ] - if not valid_indexes: - return - - selection_model = self._view.selectionModel() - selection_model.clearSelection() - - mode = ( - QtCore.QItemSelectionModel.Select - | QtCore.QItemSelectionModel.Rows - ) - for index in valid_indexes: - self._view.expand(self._proxy.parent(index)) - selection_model.select(index, mode) - self._view.setCurrentIndex(valid_indexes[0]) - - -class SingleSelectAssetsWidget(_AssetsWidget): - """Single selection asset widget. - - Contain single selection specific api methods. - - Deprecated: - This widget will be removed soon. Please do not use it in new code. 
- """ - - def get_selected_asset_id(self): - """Currently selected asset id.""" - selection_model = self._view.selectionModel() - indexes = selection_model.selectedRows() - for index in indexes: - return index.data(ASSET_ID_ROLE) - return None - - def get_selected_asset_name(self): - """Currently selected asset name.""" - selection_model = self._view.selectionModel() - indexes = selection_model.selectedRows() - for index in indexes: - return index.data(ASSET_NAME_ROLE) - return None diff --git a/client/ayon_core/tools/utils/color_widgets/color_inputs.py b/client/ayon_core/tools/utils/color_widgets/color_inputs.py index 9c8e7b92e8..795b80fc1e 100644 --- a/client/ayon_core/tools/utils/color_widgets/color_inputs.py +++ b/client/ayon_core/tools/utils/color_widgets/color_inputs.py @@ -562,11 +562,11 @@ class HSLInputs(QtWidgets.QWidget): return self._block_changes = True - h, s, l, _ = self.color.getHsl() + hue, sat, lum, _ = self.color.getHsl() - self.input_hue.setValue(h) - self.input_sat.setValue(s) - self.input_light.setValue(l) + self.input_hue.setValue(hue) + self.input_sat.setValue(sat) + self.input_light.setValue(lum) self._block_changes = False diff --git a/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py b/client/ayon_core/tools/utils/folders_widget.py similarity index 99% rename from client/ayon_core/tools/ayon_utils/widgets/folders_widget.py rename to client/ayon_core/tools/utils/folders_widget.py index e42a5b635c..2ad640de37 100644 --- a/client/ayon_core/tools/ayon_utils/widgets/folders_widget.py +++ b/client/ayon_core/tools/utils/folders_widget.py @@ -3,16 +3,15 @@ import collections from qtpy import QtWidgets, QtGui, QtCore from ayon_core.lib.events import QueuedEventSystem -from ayon_core.tools.ayon_utils.models import ( +from ayon_core.tools.common_models import ( HierarchyModel, HierarchyExpectedSelection, ) -from ayon_core.tools.utils import ( - RecursiveSortFilterProxyModel, - TreeView, -) -from .utils import RefreshThread, get_qt_icon +from .models import RecursiveSortFilterProxyModel +from .views import TreeView +from .lib import RefreshThread, get_qt_icon + FOLDERS_MODEL_SENDER_NAME = "qt_folders_model" FOLDER_ID_ROLE = QtCore.Qt.UserRole + 1 diff --git a/client/ayon_core/tools/utils/host_tools.py b/client/ayon_core/tools/utils/host_tools.py index 8841a377cf..1eff746b9e 100644 --- a/client/ayon_core/tools/utils/host_tools.py +++ b/client/ayon_core/tools/utils/host_tools.py @@ -7,12 +7,9 @@ import os import pyblish.api -from ayon_core.host import IWorkfileHost, ILoadHost +from ayon_core.host import ILoadHost from ayon_core.lib import Logger -from ayon_core.pipeline import ( - registered_host, - get_current_asset_name, -) +from ayon_core.pipeline import registered_host from .lib import qt_app_context diff --git a/client/ayon_core/tools/utils/lib.py b/client/ayon_core/tools/utils/lib.py index e785cec390..d56b370d75 100644 --- a/client/ayon_core/tools/utils/lib.py +++ b/client/ayon_core/tools/utils/lib.py @@ -1,12 +1,12 @@ import os import sys import contextlib +from functools import partial from qtpy import QtWidgets, QtCore, QtGui import qtawesome from ayon_core.style import ( - get_default_entity_icon_color, get_objected_colors, get_app_icon_path, ) @@ -120,12 +120,13 @@ def paint_image_with_color(image, color): return pixmap -def format_version(value, hero_version=False): +def format_version(value): """Formats integer to displayable version name""" - label = "v{0:03d}".format(value) - if not hero_version: - return label - return "[{}]".format(label) + value = 
int(value)  # convert e.g. HeroVersionType to its version value
+    label = "v{0:03d}".format(abs(value))
+    if value < 0:
+        return "[{}]".format(label)
+    return label
 
 
 @contextlib.contextmanager
@@ -195,79 +196,6 @@ def get_openpype_qt_app():
     return get_ayon_qt_app()
 
 
-class _Cache:
-    icons = {}
-
-
-def get_qta_icon_by_name_and_color(icon_name, icon_color):
-    if not icon_name or not icon_color:
-        return None
-
-    full_icon_name = "{0}-{1}".format(icon_name, icon_color)
-    if full_icon_name in _Cache.icons:
-        return _Cache.icons[full_icon_name]
-
-    variants = [icon_name]
-    qta_instance = qtawesome._instance()
-    for key in qta_instance.charmap.keys():
-        variants.append("{0}.{1}".format(key, icon_name))
-
-    icon = None
-    used_variant = None
-    for variant in variants:
-        try:
-            icon = qtawesome.icon(variant, color=icon_color)
-            used_variant = variant
-            break
-        except Exception:
-            pass
-
-    if used_variant is None:
-        log.info("Didn't find icon \"{}\"".format(icon_name))
-
-    elif used_variant != icon_name:
-        log.debug("Icon \"{}\" was not found \"{}\" is used instead".format(
-            icon_name, used_variant
-        ))
-
-    _Cache.icons[full_icon_name] = icon
-    return icon
-
-
-def get_default_task_icon(color=None):
-    if color is None:
-        color = get_default_entity_icon_color()
-    return get_qta_icon_by_name_and_color("fa.male", color)
-
-
-def get_task_icon(project_doc, asset_doc, task_name):
-    """Get icon for a task.
-
-    Icon should be defined by task type which is stored on project.
-    """
-
-    color = get_default_entity_icon_color()
-
-    tasks_info = asset_doc.get("data", {}).get("tasks") or {}
-    task_info = tasks_info.get(task_name) or {}
-    task_icon = task_info.get("icon")
-    if task_icon:
-        icon = get_qta_icon_by_name_and_color(task_icon, color)
-        if icon is not None:
-            return icon
-
-    task_type = task_info.get("type")
-    task_types = project_doc["config"]["tasks"]
-
-    task_type_info = task_types.get(task_type) or {}
-    task_type_icon = task_type_info.get("icon")
-    if task_type_icon:
-        icon = get_qta_icon_by_name_and_color(task_icon, color)
-        if icon is not None:
-            return icon
-    return get_default_task_icon(color)
-
-
 def iter_model_rows(model, column, include_root=False):
     """Iterate over all row indices in a model"""
     indices = [QtCore.QModelIndex()]  # start iteration at root
@@ -485,3 +413,156 @@ def get_warning_pixmap(color=None):
         color = get_objected_colors("delete-btn-bg").get_qcolor()
 
     return paint_image_with_color(src_image, color)
+
+
+class RefreshThread(QtCore.QThread):
+    refresh_finished = QtCore.Signal(str)
+
+    def __init__(self, thread_id, func, *args, **kwargs):
+        super(RefreshThread, self).__init__()
+        self._id = thread_id
+        self._callback = partial(func, *args, **kwargs)
+        self._exception = None
+        self._result = None
+        self.finished.connect(self._on_finish_callback)
+
+    @property
+    def id(self):
+        return self._id
+
+    @property
+    def failed(self):
+        return self._exception is not None
+
+    def run(self):
+        try:
+            self._result = self._callback()
+        except Exception as exc:
+            self._exception = exc
+
+    def get_result(self):
+        return self._result
+
+    def _on_finish_callback(self):
+        """Trigger custom signal with thread id.
+
+        By listening for the 'finished' signal we make sure that execution
+        of the thread finished and the QThread object can be safely deleted.
+        """
+
+        self.refresh_finished.emit(self.id)
+
+
+class _IconsCache:
+    """Cache for icons."""
+
+    _cache = {}
+    _default = None
+    _qtawesome_cache = {}
+
+    @classmethod
+    def _get_cache_key(cls, icon_def):
+        parts = []
+        icon_type = icon_def["type"]
+        if icon_type == "path":
+            parts = [icon_type, icon_def["path"]]
+
+        elif icon_type == "awesome-font":
+            parts = [icon_type, icon_def["name"], icon_def["color"]]
+        return "|".join(parts)
+
+    @classmethod
+    def get_icon(cls, icon_def):
+        if not icon_def:
+            return None
+        icon_type = icon_def["type"]
+        cache_key = cls._get_cache_key(icon_def)
+        cache = cls._cache.get(cache_key)
+        if cache is not None:
+            return cache
+
+        icon = None
+        if icon_type == "path":
+            path = icon_def["path"]
+            if os.path.exists(path):
+                icon = QtGui.QIcon(path)
+
+        elif icon_type == "awesome-font":
+            icon_name = icon_def["name"]
+            icon_color = icon_def["color"]
+            icon = cls.get_qta_icon_by_name_and_color(icon_name, icon_color)
+            if icon is None:
+                icon = cls.get_qta_icon_by_name_and_color(
+                    "fa.{}".format(icon_name), icon_color)
+        if icon is None:
+            icon = cls.get_default()
+        cls._cache[cache_key] = icon
+        return icon
+
+    @classmethod
+    def get_default(cls):
+        # Create the transparent fallback icon once and keep it in '_default'
+        if cls._default is None:
+            pix = QtGui.QPixmap(1, 1)
+            pix.fill(QtCore.Qt.transparent)
+            cls._default = QtGui.QIcon(pix)
+        return cls._default
+
+    @classmethod
+    def get_qta_icon_by_name_and_color(cls, icon_name, icon_color):
+        if not icon_name or not icon_color:
+            return None
+
+        full_icon_name = "{0}-{1}".format(icon_name, icon_color)
+        if full_icon_name in cls._qtawesome_cache:
+            return cls._qtawesome_cache[full_icon_name]
+
+        variants = [icon_name]
+        qta_instance = qtawesome._instance()
+        for key in qta_instance.charmap.keys():
+            variants.append("{0}.{1}".format(key, icon_name))
+
+        icon = None
+        used_variant = None
+        for variant in variants:
+            try:
+                icon = qtawesome.icon(variant, color=icon_color)
+                used_variant = variant
+                break
+            except Exception:
+                pass
+
+        if used_variant is None:
+            log.info("Didn't find icon \"{}\"".format(icon_name))
+
+        elif used_variant != icon_name:
+            log.debug("Icon \"{}\" was not found, \"{}\" is used instead".format(
+                icon_name, used_variant
+            ))
+
+        cls._qtawesome_cache[full_icon_name] = icon
+        return icon
+
+
+def get_qt_icon(icon_def):
+    """Returns icon from cache or creates a new one.
+
+    Args:
+        icon_def (dict[str, Any]): Icon definition.
+
+    Returns:
+        QtGui.QIcon: Icon.
+
+    """
+    return _IconsCache.get_icon(icon_def)
+
+
+def get_qta_icon_by_name_and_color(icon_name, icon_color):
+    """Returns icon from cache or creates a new one.
+
+    Args:
+        icon_name (str): Icon name.
+        icon_color (str): Icon color.
+
+    Returns:
+        QtGui.QIcon: Icon.
+ + """ + return _IconsCache.get_qta_icon_by_name_and_color(icon_name, icon_color) diff --git a/client/ayon_core/tools/utils/models.py b/client/ayon_core/tools/utils/models.py index a4b6ad7885..9b32cc5710 100644 --- a/client/ayon_core/tools/utils/models.py +++ b/client/ayon_core/tools/utils/models.py @@ -2,13 +2,7 @@ import re import logging import qtpy -from qtpy import QtCore, QtGui -from ayon_core.client import get_projects -from .constants import ( - PROJECT_IS_ACTIVE_ROLE, - PROJECT_NAME_ROLE, - DEFAULT_PROJECT_LABEL -) +from qtpy import QtCore log = logging.getLogger(__name__) diff --git a/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py b/client/ayon_core/tools/utils/projects_widget.py similarity index 99% rename from client/ayon_core/tools/ayon_utils/widgets/projects_widget.py rename to client/ayon_core/tools/utils/projects_widget.py index 79ffc77640..fd361493ab 100644 --- a/client/ayon_core/tools/ayon_utils/widgets/projects_widget.py +++ b/client/ayon_core/tools/utils/projects_widget.py @@ -1,7 +1,8 @@ from qtpy import QtWidgets, QtCore, QtGui -from ayon_core.tools.ayon_utils.models import PROJECTS_MODEL_SENDER -from .utils import RefreshThread, get_qt_icon +from ayon_core.tools.common_models import PROJECTS_MODEL_SENDER + +from .lib import RefreshThread, get_qt_icon PROJECT_NAME_ROLE = QtCore.Qt.UserRole + 1 PROJECT_IS_ACTIVE_ROLE = QtCore.Qt.UserRole + 2 diff --git a/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py b/client/ayon_core/tools/utils/tasks_widget.py similarity index 97% rename from client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py rename to client/ayon_core/tools/utils/tasks_widget.py index b273d83fa6..0ff8e8a5c1 100644 --- a/client/ayon_core/tools/ayon_utils/widgets/tasks_widget.py +++ b/client/ayon_core/tools/utils/tasks_widget.py @@ -1,9 +1,9 @@ from qtpy import QtWidgets, QtGui, QtCore from ayon_core.style import get_disabled_entity_icon_color -from ayon_core.tools.utils import DeselectableTreeView -from .utils import RefreshThread, get_qt_icon +from .views import DeselectableTreeView +from .lib import RefreshThread, get_qt_icon TASKS_MODEL_SENDER_NAME = "qt_tasks_model" ITEM_ID_ROLE = QtCore.Qt.UserRole + 1 @@ -381,6 +381,15 @@ class TasksWidget(QtWidgets.QWidget): "task_type": task_type, } + def get_selected_task_id(self): + """Get selected task id. + + Returns: + Union[str, None]: Task id. + + """ + return self.get_selected_task_info()["task_id"] + def get_selected_task_name(self): """Get selected task name. @@ -388,8 +397,7 @@ class TasksWidget(QtWidgets.QWidget): Union[str, None]: Task name. """ - _, _, task_name, _ = self._get_selected_item_ids() - return task_name + return self.get_selected_task_info()["task_name"] def get_selected_task_type(self): """Get selected task type. @@ -398,8 +406,7 @@ class TasksWidget(QtWidgets.QWidget): Union[str, None]: Task type. """ - _, _, _, task_type = self._get_selected_item_ids() - return task_type + return self.get_selected_task_info()["task_type"] def set_selected_task(self, task_name): """Set selected task by name. 
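# --- Example (editor's note, not part of the diff) -------------------------
# A minimal usage sketch for the helpers consolidated into
# client/ayon_core/tools/utils/lib.py above. The icon definitions, color
# value, and icon path below are illustrative assumptions, not code from
# this PR.
from qtpy import QtWidgets

from ayon_core.tools.utils.lib import format_version, get_qt_icon

app = QtWidgets.QApplication([])  # Qt must exist before icons are created

# 'format_version' now infers hero formatting from the sign of the value
# (e.g. HeroVersionType converts to a negative integer) instead of taking
# a separate 'hero_version' flag.
assert format_version(3) == "v003"
assert format_version(-3) == "[v003]"

# 'get_qt_icon' takes a definition dict; "path" and "awesome-font" are the
# two types handled by '_IconsCache', and results are cached per definition.
folder_icon = get_qt_icon({
    "type": "awesome-font",
    "name": "fa.folder",
    "color": "#ffffff",
})
file_icon = get_qt_icon({
    "type": "path",
    "path": "/path/to/icon.png",  # missing paths fall back to a blank icon
})
# ----------------------------------------------------------------------------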
diff --git a/client/ayon_core/tools/utils/widgets.py b/client/ayon_core/tools/utils/widgets.py
index 1d4f85246f..21cab5d682 100644
--- a/client/ayon_core/tools/utils/widgets.py
+++ b/client/ayon_core/tools/utils/widgets.py
@@ -578,7 +578,8 @@ class OptionalAction(QtWidgets.QWidgetAction):
     def set_option_tip(self, options):
         sep = "\n\n"
         if not options or not isinstance(options[0], AbstractAttrDef):
-            mak = (lambda opt: opt["name"] + " :\n " + opt["help"])
+            def mak(opt):
+                return opt["name"] + " :\n " + opt["help"]
             self.option_tip = sep.join(mak(opt) for opt in options)
             return
 
diff --git a/client/ayon_core/tools/workfile_template_build/lib.py b/client/ayon_core/tools/workfile_template_build/lib.py
index de3a0d0084..ffd6fefc38 100644
--- a/client/ayon_core/tools/workfile_template_build/lib.py
+++ b/client/ayon_core/tools/workfile_template_build/lib.py
@@ -8,12 +8,12 @@ from ayon_core.tools.utils.dialogs import show_message_dialog
 def open_template_ui(builder, main_window):
     """Open template from `builder`
 
-    Asks user about overwriting current scene and feedsback exceptions.
+    Asks the user about overwriting the current scene and gives feedback on exceptions.
     """
     result = QtWidgets.QMessageBox.question(
         main_window,
         "Opening template",
-        "Caution! You will loose unsaved changes.\nDo you want to continue?",
+        "Caution! You will lose unsaved changes.\nDo you want to continue?",
         QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No
     )
     if result == QtWidgets.QMessageBox.Yes:
diff --git a/client/ayon_core/tools/workfiles/control.py b/client/ayon_core/tools/workfiles/control.py
index 86c6a62a11..3048e6be94 100644
--- a/client/ayon_core/tools/workfiles/control.py
+++ b/client/ayon_core/tools/workfiles/control.py
@@ -3,7 +3,6 @@ import shutil
 
 import ayon_api
 
-from ayon_core.client import get_asset_by_id
 from ayon_core.host import IWorkfileHost
 from ayon_core.lib import Logger, emit_event
 from ayon_core.lib.events import QueuedEventSystem
@@ -16,7 +15,7 @@ from ayon_core.pipeline.context_tools import (
 )
 from ayon_core.pipeline.workfile import create_workdir_extra_folders
 
-from ayon_core.tools.ayon_utils.models import (
+from ayon_core.tools.common_models import (
     HierarchyModel,
     HierarchyExpectedSelection,
     ProjectsModel,
@@ -573,6 +572,7 @@ class BaseWorkfileController(
                 workdir,
                 filename,
                 template_key,
+                src_filepath=representation_filepath
             )
         except Exception:
             failed = True
@@ -635,13 +635,10 @@ class BaseWorkfileController(
             folder = self.get_folder_entity(project_name, folder_id)
         if task is None:
             task = self.get_task_entity(project_name, task_id)
-        # NOTE keys should are OpenPype compatible
         return {
             "project_name": project_name,
             "folder_id": folder_id,
             "folder_path": folder["path"],
-            "asset_id": folder_id,
-            "asset_name": folder["name"],
             "task_id": task_id,
             "task_name": task["name"],
             "host_name": self.get_host_name(),
@@ -662,15 +659,7 @@ class BaseWorkfileController(
             folder_id != self.get_current_folder_id()
             or task_name != self.get_current_task_name()
         ):
-            # Use OpenPype asset-like object
-            asset_doc = get_asset_by_id(
-                event_data["project_name"],
-                event_data["folder_id"],
-            )
-            change_current_context(
-                asset_doc,
-                event_data["task_name"]
-            )
+            self._change_current_context(project_name, folder_id, task_id)
 
         self._host_open_workfile(filepath)
 
@@ -712,12 +701,8 @@ class BaseWorkfileController(
             folder_id != self.get_current_folder_id()
             or task_name != self.get_current_task_name()
         ):
-            # Use OpenPype asset-like object
-            asset_doc = get_asset_by_id(project_name, folder["id"])
-            change_current_context(
-                asset_doc,
-
task["name"], - template_key=template_key + self._change_current_context( + project_name, folder_id, task_id, template_key ) # Save workfile @@ -742,4 +727,18 @@ class BaseWorkfileController( # Trigger after save events emit_event("workfile.save.after", event_data, source="workfiles.tool") - self.reset() + + def _change_current_context( + self, project_name, folder_id, task_id, template_key=None + ): + # Change current context + folder_entity = self.get_folder_entity(project_name, folder_id) + task_entity = self.get_task_entity(project_name, task_id) + change_current_context( + folder_entity, + task_entity, + template_key=template_key + ) + self._current_folder_id = folder_entity["id"] + self._current_folder_path = folder_entity["path"] + self._current_task_name = task_entity["name"] diff --git a/client/ayon_core/tools/workfiles/models/workfiles.py b/client/ayon_core/tools/workfiles/models/workfiles.py index 1e9491b3d7..5f59b99b22 100644 --- a/client/ayon_core/tools/workfiles/models/workfiles.py +++ b/client/ayon_core/tools/workfiles/models/workfiles.py @@ -6,12 +6,10 @@ import arrow import ayon_api from ayon_api.operations import OperationsSession -from ayon_core.client import get_project -from ayon_core.client.operations import ( - prepare_workfile_info_update_data, -) from ayon_core.pipeline.template_data import ( get_template_data, + get_task_template_data, + get_folder_template_data, ) from ayon_core.pipeline.workfile import ( get_workdir_with_workdir_data, @@ -26,44 +24,15 @@ from ayon_core.tools.workfiles.abstract import ( ) -def get_folder_template_data(folder): - if not folder: - return {} - parts = folder["path"].split("/") - parts.pop(-1) - hierarchy = "/".join(parts) - return { - "asset": folder["name"], - "folder": { - "name": folder["name"], - "type": folder["folderType"], - "path": folder["path"], - }, - "hierarchy": hierarchy, - } - - -def get_task_template_data(project_entity, task): - if not task: - return {} - short_name = None - task_type_name = task["taskType"] - for task_type_info in project_entity["taskTypes"]: - if task_type_info["name"] == task_type_name: - short_name = task_type_info["shortName"] - break - - return { - "task": { - "name": task["name"], - "type": task_type_name, - "short": short_name, - } - } - - class CommentMatcher(object): - """Use anatomy and work file data to parse comments from filenames""" + """Use anatomy and work file data to parse comments from filenames. + + Args: + extensions (set[str]): Set of extensions. + file_template (AnatomyStringTemplate): File template. + data (dict[str, Any]): Data to fill the template with. 
+
+    """
 
     def __init__(self, extensions, file_template, data):
         self.fname_regex = None
@@ -140,7 +109,9 @@ class WorkareaModel:
 
     def _get_base_data(self):
        if self._base_data is None:
-            base_data = get_template_data(get_project(self.project_name))
+            base_data = get_template_data(
+                ayon_api.get_project(self.project_name)
+            )
             base_data["app"] = self._controller.get_host_name()
             self._base_data = base_data
         return copy.deepcopy(self._base_data)
@@ -151,7 +122,7 @@ class WorkareaModel:
         folder = self._controller.get_folder_entity(
             self.project_name, folder_id
         )
-        fill_data = get_folder_template_data(folder)
+        fill_data = get_folder_template_data(folder, self.project_name)
         self._fill_data_by_folder_id[folder_id] = fill_data
         return copy.deepcopy(fill_data)
@@ -227,16 +198,32 @@ class WorkareaModel:
         task_type = fill_data.get("task", {}).get("type")
         # TODO cache
         return get_workfile_template_key(
+            self.project_name,
             task_type,
             self._controller.get_host_name(),
-            project_name=self.project_name
         )
 
     def _get_last_workfile_version(
         self, workdir, file_template, fill_data, extensions
     ):
+        """Get next workfile version for the given context.
+
+        Todos:
+            Validate if logic of this function is correct. It does return
+            last version + 1 which might be wrong.
+
+        Args:
+            workdir (str): Workdir path.
+            file_template (str): File template.
+            fill_data (dict[str, Any]): Fill data.
+            extensions (set[str]): Extensions.
+
+        Returns:
+            int: Next workfile version.
+
+        """
         version = get_last_workfile_with_version(
-            workdir, str(file_template), fill_data, extensions
+            workdir, file_template, fill_data, extensions
         )[1]
 
         if version is None:
@@ -261,8 +248,21 @@ class WorkareaModel:
         root,
         current_filename,
     ):
+        """Get comments from root directory.
+
+        Args:
+            file_template (AnatomyStringTemplate): File template.
+            extensions (set[str]): Extensions.
+            fill_data (dict[str, Any]): Fill data.
+            root (str): Root directory.
+            current_filename (str): Current filename.
+
+        Returns:
+            Tuple[list[str], Union[str, None]]: Comment hints and current
+                comment.
+ + """ current_comment = None - comment_hints = set() filenames = [] if root and os.path.exists(root): for filename in os.listdir(root): @@ -275,10 +275,11 @@ class WorkareaModel: filenames.append(filename) if not filenames: - return comment_hints, current_comment + return [], current_comment matcher = CommentMatcher(extensions, file_template, fill_data) + comment_hints = set() for filename in filenames: comment = matcher.parse_comment(filename) if comment: @@ -289,23 +290,24 @@ class WorkareaModel: return list(comment_hints), current_comment def _get_workdir(self, anatomy, template_key, fill_data): - template_info = anatomy.templates_obj[template_key] - directory_template = template_info["folder"] + directory_template = anatomy.get_template_item( + "work", template_key, "directory" + ) return directory_template.format_strict(fill_data).normalized() def get_workarea_save_as_data(self, folder_id, task_id): - folder = None - task = None + folder_entity = None + task_entity = None if folder_id: - folder = self._controller.get_folder_entity( + folder_entity = self._controller.get_folder_entity( self.project_name, folder_id ) - if task_id: - task = self._controller.get_task_entity( - self.project_name, task_id - ) + if folder_entity and task_id: + task_entity = self._controller.get_task_entity( + self.project_name, task_id + ) - if not folder or not task: + if not folder_entity or not task_entity: return { "template_key": None, "template_has_version": None, @@ -335,8 +337,13 @@ class WorkareaModel: workdir = self._get_workdir(anatomy, template_key, fill_data) - template_info = anatomy.templates_obj[template_key] - file_template = template_info["file"] + file_template = anatomy.get_template_item( + "work", template_key, "file" + ) + file_template_str = file_template.template + + template_has_version = "{version" in file_template_str + template_has_comment = "{comment" in file_template_str comment_hints, comment = self._get_comments_from_root( file_template, @@ -346,10 +353,8 @@ class WorkareaModel: current_filename, ) last_version = self._get_last_workfile_version( - workdir, file_template, fill_data, extensions) - str_file_template = str(file_template) - template_has_version = "{version" in str_file_template - template_has_comment = "{comment" in str_file_template + workdir, file_template_str, fill_data, extensions + ) return { "template_key": template_key, @@ -372,18 +377,34 @@ class WorkareaModel: version, comment, ): + """Fill workarea filepath based on context. + + Args: + folder_id (str): Folder id. + task_id (str): Task id. + extension (str): File extension. + use_last_version (bool): Use last version. + version (int): Version number. + comment (str): Comment. + + Returns: + WorkareaFilepathResult: Workarea filepath result. 
+
+        """
         anatomy = self._controller.project_anatomy
         fill_data = self._prepare_fill_data(folder_id, task_id)
+
         template_key = self._get_template_key(fill_data)
 
         workdir = self._get_workdir(anatomy, template_key, fill_data)
-        template_info = anatomy.templates_obj[template_key]
-        file_template = template_info["file"]
+        file_template = anatomy.get_template_item(
+            "work", template_key, "file"
+        )
 
         if use_last_version:
             version = self._get_last_workfile_version(
-                workdir, file_template, fill_data, self._extensions
+                workdir, file_template.template, fill_data, self._extensions
             )
         fill_data["version"] = version
         fill_data["ext"] = extension.lstrip(".")
@@ -507,22 +528,24 @@ class WorkfileEntitiesModel:
         if note is None:
             return
 
+        old_note = workfile_info.get("attrib", {}).get("description")
+
         new_workfile_info = copy.deepcopy(workfile_info)
         attrib = new_workfile_info.setdefault("attrib", {})
         attrib["description"] = note
-        update_data = prepare_workfile_info_update_data(
-            workfile_info, new_workfile_info
-        )
         self._cache[identifier] = new_workfile_info
         self._items.pop(identifier, None)
-        if not update_data:
+        if old_note == note:
             return
 
         project_name = self._controller.get_current_project_name()
         session = OperationsSession()
         session.update_entity(
-            project_name, "workfile", workfile_info["id"], update_data
+            project_name,
+            "workfile",
+            workfile_info["id"],
+            {"attrib": {"description": note}},
         )
         session.commit()
 
@@ -617,7 +640,7 @@ class PublishWorkfilesModel:
     def get_file_items(self, folder_id, task_name):
         # TODO refactor to use less server API calls
         project_name = self._controller.get_current_project_name()
-        # Get subset docs of asset
+        # Get product entities of folder
         product_entities = ayon_api.get_products(
             project_name,
             folder_ids=[folder_id],
diff --git a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py
index 6a1572deb2..fe6abee951 100644
--- a/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py
+++ b/client/ayon_core/tools/workfiles/widgets/files_widget_workarea.py
@@ -20,6 +20,8 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
         controller (AbstractWorkfilesFrontend): The control object.
     """
 
+    refreshed = QtCore.Signal()
+
     def __init__(self, controller):
         super(WorkAreaFilesModel, self).__init__()
 
@@ -163,6 +165,12 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
         self._fill_items()
 
     def _fill_items(self):
+        try:
+            self._fill_items_impl()
+        finally:
+            self.refreshed.emit()
+
+    def _fill_items_impl(self):
         folder_id = self._selected_folder_id
         task_id = self._selected_task_id
         if not folder_id or not task_id:
@@ -285,6 +293,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
         selection_model.selectionChanged.connect(self._on_selection_change)
         view.double_clicked.connect(self._on_mouse_double_click)
         view.customContextMenuRequested.connect(self._on_context_menu)
+        model.refreshed.connect(self._on_model_refresh)
 
         controller.register_event_callback(
             "expected_selection_changed",
@@ -298,6 +307,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
         self._controller = controller
 
         self._published_mode = False
+        self._change_selection_on_refresh = True
 
     def set_published_mode(self, published_mode):
         """Set the published mode.
@@ -379,7 +389,9 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): if not workfile_info["current"]: return + self._change_selection_on_refresh = False self._model.refresh() + self._change_selection_on_refresh = True workfile_name = workfile_info["name"] if ( @@ -394,3 +406,30 @@ class WorkAreaFilesWidget(QtWidgets.QWidget): self._controller.expected_workfile_selected( event["folder"]["id"], event["task"]["name"], workfile_name ) + + def _on_model_refresh(self): + if ( + not self._change_selection_on_refresh + or self._proxy_model.rowCount() < 1 + ): + return + + # Find the row with latest date modified + latest_index = max( + ( + self._proxy_model.index(idx, 0) + for idx in range(self._proxy_model.rowCount()) + ), + key=lambda model_index: model_index.data(DATE_MODIFIED_ROLE) + ) + + # Select row of latest modified + selection_model = self._view.selectionModel() + selection_model.select( + latest_index, + ( + QtCore.QItemSelectionModel.ClearAndSelect + | QtCore.QItemSelectionModel.Current + | QtCore.QItemSelectionModel.Rows + ) + ) diff --git a/client/ayon_core/tools/workfiles/widgets/window.py b/client/ayon_core/tools/workfiles/widgets/window.py index 86a84b6195..1cfae7ec90 100644 --- a/client/ayon_core/tools/workfiles/widgets/window.py +++ b/client/ayon_core/tools/workfiles/widgets/window.py @@ -6,9 +6,13 @@ from ayon_core.tools.utils import ( MessageOverlayObject, ) -from ayon_core.tools.ayon_utils.widgets import FoldersWidget, TasksWidget from ayon_core.tools.workfiles.control import BaseWorkfileController -from ayon_core.tools.utils import GoToCurrentButton, RefreshButton +from ayon_core.tools.utils import ( + GoToCurrentButton, + RefreshButton, + FoldersWidget, + TasksWidget, +) from .side_panel import SidePanelWidget from .files_widget import FilesWidget @@ -114,11 +118,11 @@ class WorkfilesToolWindow(QtWidgets.QWidget): overlay_invalid_host = InvalidHostOverlay(self) overlay_invalid_host.setVisible(False) - first_show_timer = QtCore.QTimer() - first_show_timer.setSingleShot(True) - first_show_timer.setInterval(50) + show_timer = QtCore.QTimer() + show_timer.setSingleShot(True) + show_timer.setInterval(50) - first_show_timer.timeout.connect(self._on_first_show) + show_timer.timeout.connect(self._on_show) controller.register_event_callback( "save_as.finished", @@ -155,7 +159,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget): self._tasks_widget = tasks_widget self._side_panel = side_panel - self._first_show_timer = first_show_timer + self._show_timer = show_timer self._post_init() @@ -283,9 +287,9 @@ class WorkfilesToolWindow(QtWidgets.QWidget): def showEvent(self, event): super(WorkfilesToolWindow, self).showEvent(event) + self._show_timer.start() if self._first_show: self._first_show = False - self._first_show_timer.start() self.setStyleSheet(style.load_stylesheet()) def keyPressEvent(self, event): @@ -299,9 +303,8 @@ class WorkfilesToolWindow(QtWidgets.QWidget): pass - def _on_first_show(self): - if not self._controller_refreshed: - self.refresh() + def _on_show(self): + self.refresh() def _on_file_text_filter_change(self, text): self._files_widget.set_text_filter(text) diff --git a/client/ayon_core/vendor/python/common/pysync.py b/client/ayon_core/vendor/python/common/pysync.py deleted file mode 100644 index 14a6dda34c..0000000000 --- a/client/ayon_core/vendor/python/common/pysync.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/local/bin/python3 -# https://github.com/snullp/pySync/blob/master/pySync.py - -import sys -import shutil -import os -import time -import configparser -from 
os.path import ( - getsize, - getmtime, - isfile, - isdir, - join, - abspath, - expanduser, - realpath -) -import logging - -log = logging.getLogger(__name__) - -ignoreFiles = ("Thumbs.db", ".DS_Store") - -# this feature is not yet implemented -ignorePaths = [] - -if os.name == 'nt': - # msvcrt can't function correctly in IDLE - if 'idlelib.run' in sys.modules: - print("Please don't run this script in IDLE.") - sys.exit(0) - import msvcrt - - def flush_input(str, set=None): - if not set: - while msvcrt.kbhit(): - ch = msvcrt.getch() - if ch == '\xff': - print("msvcrt is broken, this is weird.") - sys.exit(0) - return input(str) - else: - return set -else: - import select - - def flush_input(str, set=None): - if not set: - while len(select.select([sys.stdin.fileno()], [], [], 0.0)[0]) > 0: - os.read(sys.stdin.fileno(), 4096) - return input(str) - else: - return set - - -def compare(fa, fb, options_input=[]): - if isfile(fa) == isfile(fb): - if isdir(fa): - walktree(fa, fb, options_input) - elif isfile(fa): - if getsize(fa) != getsize(fb) \ - or int(getmtime(fa)) != int(getmtime(fb)): - log.info(str((fa, ': size=', getsize(fa), 'mtime=', - time.asctime(time.localtime(getmtime(fa)))))) - log.info(str((fb, ': size=', getsize(fb), 'mtime=', - time.asctime(time.localtime(getmtime(fb)))))) - if getmtime(fa) > getmtime(fb): - act = '>' - else: - act = '<' - - set = [i for i in options_input if i in [">", "<"]][0] - - s = flush_input('What to do?(>,<,r,n)[' + act + ']', set=set) - if len(s) > 0: - act = s[0] - if act == '>': - shutil.copy2(fa, fb) - elif act == '<': - shutil.copy2(fb, fa) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.info(str(('Remove: Skipping', fa))) - if isdir(fb): - shutil.rmtree(fb) - elif isfile(fb): - os.remove(fb) - else: - log.info(str(('Remove: Skipping', fb))) - - else: - log.debug(str(('Compare: Skipping non-dir and non-file', fa))) - else: - log.error(str(('Error:', fa, ',', fb, 'have different file type'))) - - -def copy(fa, fb, options_input=[]): - set = [i for i in options_input if i in ["y"]][0] - s = flush_input('Copy ' + fa + ' to another side?(r,y,n)[y]', set=set) - if len(s) > 0: - act = s[0] - else: - act = 'y' - if act == 'y': - if isdir(fa): - shutil.copytree(fa, fb) - elif isfile(fa): - shutil.copy2(fa, fb) - else: - log.debug(str(('Copy: Skipping ', fa))) - elif act == 'r': - if isdir(fa): - shutil.rmtree(fa) - elif isfile(fa): - os.remove(fa) - else: - log.debug(str(('Remove: Skipping ', fa))) - - -stoentry = [] -tarentry = [] - - -def walktree(source, target, options_input=[]): - srclist = os.listdir(source) - tarlist = os.listdir(target) - if '!sync' in srclist: - return - if '!sync' in tarlist: - return - # files in source dir... 
- for f in srclist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if spath in ignorePaths: - continue - if spath in stoentry: - # just in case target also have this one - if f in tarlist: - del tarlist[tarlist.index(f)] - continue - - # if also exists in target dir - if f in tarlist: - del tarlist[tarlist.index(f)] - compare(spath, tpath, options_input) - - # exists in source dir only - else: - copy(spath, tpath, options_input) - - # exists in target dir only - set = [i for i in options_input if i in ["<"]] - - for f in tarlist: - if f in ignoreFiles: - continue - spath = join(source, f) - tpath = join(target, f) - if tpath in ignorePaths: - continue - if tpath in tarentry: - continue - if set: - copy(tpath, spath, options_input) - else: - print("REMOVING: {}".format(f)) - if os.path.isdir(tpath): - shutil.rmtree(tpath) - else: - os.remove(tpath) - print("REMOVING: {}".format(f)) - - -if __name__ == '__main__': - stoconf = configparser.RawConfigParser() - tarconf = configparser.RawConfigParser() - stoconf.read("pySync.ini") - tarconf.read(expanduser("~/.pysync")) - stoname = stoconf.sections()[0] - tarname = tarconf.sections()[0] - - # calculate storage's base folder - if stoconf.has_option(stoname, 'BASE'): - stobase = abspath(stoconf.get(stoname, 'BASE')) - stoconf.remove_option(stoname, 'BASE') - else: - stobase = os.getcwd() - - # same, for target's base folder - if tarconf.has_option(tarname, 'BASE'): - tarbase = abspath(tarconf.get(tarname, 'BASE')) - tarconf.remove_option(tarname, 'BASE') - else: - tarbase = expanduser('~/') - - print("Syncing between", stoname, "and", tarname) - sto_content = {x: realpath(join(stobase, stoconf.get(stoname, x))) - for x in stoconf.options(stoname)} - tar_content = {x: realpath(join(tarbase, tarconf.get(tarname, x))) - for x in tarconf.options(tarname)} - stoentry = [sto_content[x] for x in sto_content] - tarentry = [tar_content[x] for x in tar_content] - - for folder in sto_content: - if folder in tar_content: - print('Processing', folder) - walktree(sto_content[folder], tar_content[folder], options_input) - print("Done.") diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index f3ad9713d5..a60de0493a 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON core addon version.""" -__version__ = "0.3.0-dev.1" +__version__ = "0.3.1-dev.1" diff --git a/client/ayon_core/widgets/__init__.py b/client/ayon_core/widgets/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/client/ayon_core/widgets/password_dialog.py b/client/ayon_core/widgets/password_dialog.py deleted file mode 100644 index a4c50128ff..0000000000 --- a/client/ayon_core/widgets/password_dialog.py +++ /dev/null @@ -1,33 +0,0 @@ -# TODO remove - kept for kitsu addon which imported it -from qtpy import QtWidgets, QtCore, QtGui - - -class PressHoverButton(QtWidgets.QPushButton): - """ - Deprecated: - Use `openpype.tools.utils.PressHoverButton` instead. 
- """ - _mouse_pressed = False - _mouse_hovered = False - change_state = QtCore.Signal(bool) - - def mousePressEvent(self, event): - self._mouse_pressed = True - self._mouse_hovered = True - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mousePressEvent(event) - - def mouseReleaseEvent(self, event): - self._mouse_pressed = False - self._mouse_hovered = False - self.change_state.emit(self._mouse_hovered) - super(PressHoverButton, self).mouseReleaseEvent(event) - - def mouseMoveEvent(self, event): - mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) - under_mouse = self.rect().contains(mouse_pos) - if under_mouse != self._mouse_hovered: - self._mouse_hovered = under_mouse - self.change_state.emit(self._mouse_hovered) - - super(PressHoverButton, self).mouseMoveEvent(event) diff --git a/client/pyproject.toml b/client/pyproject.toml index 7b4329a31a..1a0ad7e5f2 100644 --- a/client/pyproject.toml +++ b/client/pyproject.toml @@ -4,19 +4,19 @@ description="AYON core addon." [tool.poetry.dependencies] python = ">=3.9.1,<3.10" -aiohttp_json_rpc = "*" # TVPaint server -aiohttp-middlewares = "^2.0.0" -wsrpc_aiohttp = "^3.1.1" # websocket server -Click = "^8" clique = "1.6.*" jsonschema = "^2.6.0" pyblish-base = "^1.8.11" -pynput = "^1.7.2" # Timers manager - TODO remove speedcopy = "^2.1" six = "^1.15" qtawesome = "0.7.3" [ayon.runtimeDependencies] +aiohttp_json_rpc = "*" # TVPaint server +aiohttp-middlewares = "^2.0.0" +wsrpc_aiohttp = "^3.1.1" # websocket server +Click = "^8" OpenTimelineIO = "0.14.1" opencolorio = "2.2.1" Pillow = "9.5.0" +pynput = "^1.7.2" # Timers manager - TODO remove diff --git a/package.py b/package.py index 470bbf256b..79450d029f 100644 --- a/package.py +++ b/package.py @@ -1,6 +1,6 @@ name = "core" title = "Core" -version = "0.3.0-dev.1" +version = "0.3.1-dev.1" client_dir = "ayon_core" diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..be5a3b2c2c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,564 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = "*" +files = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] + +[[package]] +name = "ayon-python-api" +version = "1.0.1" +description = "AYON Python API" +optional = false +python-versions = "*" +files = [ + {file = "ayon-python-api-1.0.1.tar.gz", hash = "sha256:6a53af84903317e2097f3c6bba0094e90d905d6670fb9c7d3ad3aa9de6552bc1"}, + {file = "ayon_python_api-1.0.1-py3-none-any.whl", hash = "sha256:d4b649ac39c9003cdbd60f172c0d35f05d310fba3a0649b6d16300fe67f967d6"}, +] + +[package.dependencies] +appdirs = ">=1,<2" +requests = ">=2.27.1" +six = ">=1.15" +Unidecode = ">=1.2.0" + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, 
+ {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "codespell" +version = "2.2.6" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "identify" +version = "2.5.35" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = 
"sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.6.2" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"}, + {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-print" +version = "1.0.0" +description = "pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_print-1.0.0-py3-none-any.whl", hash = "sha256:23484f42b906b87e31abd564761efffeb0348a6f83109fb857ee6e8e5df42b69"}, + {file = "pytest_print-1.0.0.tar.gz", hash = "sha256:1fcde9945fba462227a8959271369b10bb7a193be8452162707e63cd60875ca0"}, +] + +[package.dependencies] +pytest = ">=7.4" + +[package.extras] +test = ["covdefaults (>=2.3)", "coverage (>=7.3)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.3.3" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:973a0e388b7bc2e9148c7f9be8b8c6ae7471b9be37e1cc732f8f44a6f6d7720d"}, + {file = "ruff-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfa60d23269d6e2031129b053fdb4e5a7b0637fc6c9c0586737b962b2f834493"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eca7ff7a47043cf6ce5c7f45f603b09121a7cc047447744b029d1b719278eb5"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7d3f6762217c1da954de24b4a1a70515630d29f71e268ec5000afe81377642d"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c19e8598916d9c6f5a5437671f55ee93c212a2c4c569605dc3842b6820386"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a6cbf216b69c7090f0fe4669501a27326c34e119068c1494f35aaf4cc683778"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352e95ead6964974b234e16ba8a66dad102ec7bf8ac064a23f95371d8b198aab"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6ab88c81c4040a817aa432484e838aaddf8bfd7ca70e4e615482757acb64f8"}, + {file = "ruff-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79bca3a03a759cc773fca69e0bdeac8abd1c13c31b798d5bb3c9da4a03144a9f"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2700a804d5336bcffe063fd789ca2c7b02b552d2e323a336700abb8ae9e6a3f8"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd66469f1a18fdb9d32e22b79f486223052ddf057dc56dea0caaf1a47bdfaf4e"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45817af234605525cdf6317005923bf532514e1ea3d9270acf61ca2440691376"}, + {file = "ruff-0.3.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0da458989ce0159555ef224d5b7c24d3d2e4bf4c300b85467b08c3261c6bc6a8"}, + {file = "ruff-0.3.3-py3-none-win32.whl", hash = "sha256:f2831ec6a580a97f1ea82ea1eda0401c3cdf512cf2045fa3c85e8ef109e87de0"}, + {file = "ruff-0.3.3-py3-none-win_amd64.whl", hash = "sha256:be90bcae57c24d9f9d023b12d627e958eb55f595428bafcb7fec0791ad25ddfc"}, + {file = "ruff-0.3.3-py3-none-win_arm64.whl", hash = "sha256:0171aab5fecdc54383993389710a3d1227f2da124d76a2784a7098e818f92d61"}, + {file = "ruff-0.3.3.tar.gz", hash = "sha256:38671be06f57a2f8aba957d9f701ea889aa5736be806f18c0cd03d6ff0cbca8d"}, +] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", 
"packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +description = "ASCII transliterations of Unicode text" +optional = false +python-versions = ">=3.5" +files = [ + {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, + {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.25.1" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9.1,<3.10" +content-hash = "1bb724694792fbc2b3c05e3355e6c25305d9f4034eb7b1b4b1791ee95427f8d2" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000000..62e2dff2a2 --- /dev/null +++ b/poetry.toml @@ -0,0 +1,3 @@ +[virtualenvs] +in-project = true +create = true diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..c1f6ddfb0b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,117 @@ +# WARNING: This file is used only for development done on this addon. +# Be aware that dependencies used here might not match the ones used by +# the specific addon bundle set up on the AYON server. This file should +# be used only for local development and CI/CD purposes. + +[tool.poetry] +name = "ayon-core" +version = "0.3.1" +description = "" +authors = ["Ynput Team "] +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.9.1,<3.10" + + +[tool.poetry.dev-dependencies] +# test dependencies +pytest = "^8.0" +pytest-print = "^1.0" +ayon-python-api = "^1.0" +# linting dependencies +ruff = "^0.3.3" +pre-commit = "^3.6.2" +codespell = "^2.2.6" + + +[tool.ruff] +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", + "vendor", + "generated", +] + +# Same as Black. +line-length = 79 +indent-width = 4 + +# Assume Python 3.9 +target-version = "py39" + +[tool.ruff.lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +select = ["E4", "E7", "E9", "F"] +ignore = [] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. 
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +exclude = [ + "client/ayon_core/hosts/unreal/integration/*", + "client/ayon_core/hosts/aftereffects/api/extension/js/libs/*", + "client/ayon_core/hosts/hiero/api/startup/*", + "client/ayon_core/modules/deadline/repository/custom/plugins/CelAction/*", + "client/ayon_core/modules/deadline/repository/custom/plugins/HarmonyAYON/*", + "client/ayon_core/modules/click_wrap.py", + "client/ayon_core/scripts/slates/__init__.py" +] + +[tool.ruff.lint.per-file-ignores] +"client/ayon_core/lib/__init__.py" = ["E402"] +"client/ayon_core/hosts/max/startup/startup.py" = ["E402"] + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" + +[tool.codespell] +# Ignore words that are not in the dictionary. +ignore-words-list = "ayon,ynput,parms,parm,hda,developpement" + +skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/extension/js/libs/*" +count = true +quiet-level = 3 + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index 9b5f3ae571..e61bf6986b 100644 --- a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -9,7 +9,7 @@ from ayon_server.settings import ( task_types_enum, ) -from ayon_server.types import ColorRGB_uint8, ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8 class ValidateBaseModel(BaseSettingsModel): @@ -221,7 +221,12 @@ class OIIOToolArgumentsModel(BaseSettingsModel): class ExtractOIIOTranscodeOutputModel(BaseSettingsModel): _layout = "expanded" - name: str = SettingsField("", title="Name") + name: str = SettingsField( + "", + title="Name", + description="Output name (no space)", + regex=r"[a-zA-Z0-9_]([a-zA-Z0-9_\.\-]*[a-zA-Z0-9_])?$", + ) extension: str = SettingsField("", title="Extension") transcoding_type: str = SettingsField( "colorspace", @@ -424,7 +429,7 @@ class ExtractReviewOutputDefModel(BaseSettingsModel): title="Scale pixel aspect", description=( "Rescale input when it's pixel aspect ratio is not 1." - " Usefull for anamorph reviews." + " Useful for anamorphic reviews." 
) ) bg_color: ColorRGBA_uint8 = SettingsField( diff --git a/server/settings/tools.py b/server/settings/tools.py index b45f9b49d4..fb8430a71c 100644 --- a/server/settings/tools.py +++ b/server/settings/tools.py @@ -173,6 +173,7 @@ def _product_types_enum(): "rig", "setdress", "take", + "usd", "usdShade", "vdbcache", "vrayproxy", @@ -410,14 +411,14 @@ DEFAULT_TOOLS_VALUES = { { "task_types": [], "hosts": [], - "workfile_template": "work" + "workfile_template": "default" }, { "task_types": [], "hosts": [ "unreal" ], - "workfile_template": "work_unreal" + "workfile_template": "unreal" } ], "last_workfile_on_startup": [ @@ -457,7 +458,7 @@ DEFAULT_TOOLS_VALUES = { "hosts": [], "task_types": [], "task_names": [], - "template_name": "publish" + "template_name": "default" }, { "product_types": [ @@ -468,7 +469,7 @@ DEFAULT_TOOLS_VALUES = { "hosts": [], "task_types": [], "task_names": [], - "template_name": "publish_render" + "template_name": "render" }, { "product_types": [ @@ -479,7 +480,7 @@ DEFAULT_TOOLS_VALUES = { ], "task_types": [], "task_names": [], - "template_name": "publish_simpleUnrealTexture" + "template_name": "simpleUnrealTexture" }, { "product_types": [ @@ -491,7 +492,7 @@ DEFAULT_TOOLS_VALUES = { ], "task_types": [], "task_names": [], - "template_name": "publish_maya2unreal" + "template_name": "maya2unreal" }, { "product_types": [ @@ -502,7 +503,7 @@ DEFAULT_TOOLS_VALUES = { ], "task_types": [], "task_names": [], - "template_name": "publish_online" + "template_name": "online" }, { "product_types": [ @@ -513,7 +514,7 @@ DEFAULT_TOOLS_VALUES = { ], "task_types": [], "task_names": [], - "template_name": "publish_tycache" + "template_name": "tycache" } ], "hero_template_name_profiles": [ @@ -526,7 +527,7 @@ DEFAULT_TOOLS_VALUES = { ], "task_types": [], "task_names": [], - "template_name": "hero_simpleUnrealTextureHero" + "template_name": "simpleUnrealTextureHero" } ] } diff --git a/server_addon/aftereffects/package.py b/server_addon/aftereffects/package.py new file mode 100644 index 0000000000..a680b37602 --- /dev/null +++ b/server_addon/aftereffects/package.py @@ -0,0 +1,3 @@ +name = "aftereffects" +title = "AfterEffects" +version = "0.1.3" diff --git a/server_addon/aftereffects/server/__init__.py b/server_addon/aftereffects/server/__init__.py index e14e76e9db..76e6d5b2eb 100644 --- a/server_addon/aftereffects/server/__init__.py +++ b/server_addon/aftereffects/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import AfterEffectsSettings, DEFAULT_AFTEREFFECTS_SETTING -from .version import __version__ class AfterEffects(BaseServerAddon): - name = "aftereffects" - title = "AfterEffects" - version = __version__ - settings_model = AfterEffectsSettings async def get_default_settings(self): diff --git a/server_addon/aftereffects/server/version.py b/server_addon/aftereffects/server/version.py deleted file mode 100644 index e57ad00718..0000000000 --- a/server_addon/aftereffects/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.3" diff --git a/server_addon/applications/client/ayon_applications/__init__.py b/server_addon/applications/client/ayon_applications/__init__.py new file mode 100644 index 0000000000..c9b72f9914 --- /dev/null +++ b/server_addon/applications/client/ayon_applications/__init__.py @@ -0,0 +1,59 @@ +from .constants import ( + APPLICATIONS_ADDON_ROOT, + DEFAULT_ENV_SUBGROUP, + PLATFORM_NAMES, +) +from .exceptions import ( + ApplicationNotFound, + 
ApplicationExecutableNotFound, + ApplicationLaunchFailed, + MissingRequiredKey, +) +from .defs import ( + LaunchTypes, + ApplicationExecutable, + UndefinedApplicationExecutable, + ApplicationGroup, + Application, + EnvironmentToolGroup, + EnvironmentTool, +) +from .hooks import ( + LaunchHook, + PreLaunchHook, + PostLaunchHook, +) +from .manager import ( + ApplicationManager, + ApplicationLaunchContext, +) +from .addon import ApplicationsAddon + + +__all__ = ( + "APPLICATIONS_ADDON_ROOT", + "DEFAULT_ENV_SUBGROUP", + "PLATFORM_NAMES", + + "ApplicationNotFound", + "ApplicationExecutableNotFound", + "ApplicationLaunchFailed", + "MissingRequiredKey", + + "LaunchTypes", + "ApplicationExecutable", + "UndefinedApplicationExecutable", + "ApplicationGroup", + "Application", + "EnvironmentToolGroup", + "EnvironmentTool", + + "LaunchHook", + "PreLaunchHook", + "PostLaunchHook", + + "ApplicationManager", + "ApplicationLaunchContext", + + "ApplicationsAddon", +) diff --git a/server_addon/applications/client/ayon_applications/addon.py b/server_addon/applications/client/ayon_applications/addon.py new file mode 100644 index 0000000000..0f1b68af0e --- /dev/null +++ b/server_addon/applications/client/ayon_applications/addon.py @@ -0,0 +1,173 @@ +import os +import json + +from ayon_core.addon import AYONAddon, IPluginPaths, click_wrap + +from .constants import APPLICATIONS_ADDON_ROOT +from .defs import LaunchTypes +from .manager import ApplicationManager + + +class ApplicationsAddon(AYONAddon, IPluginPaths): + name = "applications" + + def initialize(self, settings): + # TODO remove when addon is removed from ayon-core + self.enabled = self.name in settings + + def get_app_environments_for_context( + self, + project_name, + folder_path, + task_name, + full_app_name, + env_group=None, + launch_type=None, + env=None, + ): + """Calculate environment variables for launch context. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + full_app_name (str): Full application name. + env_group (Optional[str]): Environment group. + launch_type (Optional[str]): Launch type. + env (Optional[dict[str, str]]): Environment variables to update. + + Returns: + dict[str, str]: Environment variables for context. + + """ + from ayon_applications.utils import get_app_environments_for_context + + if not full_app_name: + return {} + + return get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=launch_type, + env=env, + addons_manager=self.manager + ) + + def get_farm_publish_environment_variables( + self, + project_name, + folder_path, + task_name, + full_app_name=None, + env_group=None, + ): + """Calculate environment variables for farm publish. + + Args: + project_name (str): Project name. + folder_path (str): Folder path. + task_name (str): Task name. + env_group (Optional[str]): Environment group. + full_app_name (Optional[str]): Full application name. Value from + environment variable 'AYON_APP_NAME' is used if 'None' is + passed. + + Returns: + dict[str, str]: Environment variables for farm publish. + + """ + if full_app_name is None: + full_app_name = os.getenv("AYON_APP_NAME") + + return self.get_app_environments_for_context( + project_name, + folder_path, + task_name, + full_app_name, + env_group=env_group, + launch_type=LaunchTypes.farm_publish + ) + + def get_applications_manager(self, settings=None): + """Get applications manager. 
+
+        Args:
+            settings (Optional[dict]): Studio/project settings.
+
+        Returns:
+            ApplicationManager: Applications manager.
+
+        """
+        return ApplicationManager(settings)
+
+    def get_plugin_paths(self):
+        return {
+            "publish": [
+                os.path.join(APPLICATIONS_ADDON_ROOT, "plugins", "publish")
+            ]
+        }
+
+    # --- CLI ---
+    def cli(self, addon_click_group):
+        main_group = click_wrap.group(
+            self._cli_main, name=self.name, help="Applications addon"
+        )
+        (
+            main_group.command(
+                self._cli_extract_environments,
+                name="extractenvironments",
+                help=(
+                    "Extract environment variables for context into json file"
+                )
+            )
+            .argument("output_json_path")
+            .option("--project", help="Project name", default=None)
+            .option("--folder", help="Folder path", default=None)
+            .option("--task", help="Task name", default=None)
+            .option("--app", help="Application name", default=None)
+            .option(
+                "--envgroup",
+                help="Environment group (e.g. \"farm\")",
+                default=None
+            )
+        )
+        # Convert main command to click object and add it to parent group
+        addon_click_group.add_command(
+            main_group.to_click_obj()
+        )
+
+    def _cli_main(self):
+        pass
+
+    def _cli_extract_environments(
+        self, output_json_path, project, folder, task, app, envgroup
+    ):
+        """Produces json file with environment based on project and app.
+
+        Called by farm integration to propagate environment into farm jobs.
+
+        Args:
+            output_json_path (str): Output json file path.
+            project (str): Project name.
+            folder (str): Folder path.
+            task (str): Task name.
+            app (str): Full application name e.g. 'maya/2024'.
+            envgroup (str): Environment group.
+
+        """
+        if all((project, folder, task, app)):
+            env = self.get_farm_publish_environment_variables(
+                project, folder, task, app, env_group=envgroup,
+            )
+        else:
+            env = os.environ.copy()
+
+        output_dir = os.path.dirname(output_json_path)
+        if not os.path.exists(output_dir):
+            os.makedirs(output_dir)
+
+        with open(output_json_path, "w") as file_stream:
+            json.dump(env, file_stream, indent=4)
diff --git a/server_addon/applications/client/ayon_applications/constants.py b/server_addon/applications/client/ayon_applications/constants.py
new file mode 100644
index 0000000000..92c8f4f254
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/constants.py
@@ -0,0 +1,6 @@
+import os
+
+APPLICATIONS_ADDON_ROOT = os.path.dirname(os.path.abspath(__file__))
+
+PLATFORM_NAMES = {"windows", "linux", "darwin"}
+DEFAULT_ENV_SUBGROUP = "standard"
diff --git a/server_addon/applications/client/ayon_applications/defs.py b/server_addon/applications/client/ayon_applications/defs.py
new file mode 100644
index 0000000000..5cc36041a1
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/defs.py
@@ -0,0 +1,404 @@
+import os
+import platform
+import json
+import copy
+
+from ayon_core.lib import find_executable
+
+
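# Illustrative sketch (editorial aside, not part of this patch): the
# "extractenvironments" command defined above writes a JSON file that farm
# jobs can load back into their process environment. The exact command-line
# entry point depends on how the AYON launcher exposes addon CLIs, but the
# options match the definitions above, e.g.:
#
#   <ayon-launcher> addon applications extractenvironments /tmp/env.json \
#       --project myproject --folder /assets/hero --task modeling \
#       --app maya/2024 --envgroup farm
#
# Loading the produced file later ("/tmp/env.json" is a hypothetical path):
import json
import os

with open("/tmp/env.json") as stream:
    os.environ.update(json.load(stream))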
+class LaunchTypes:
+    """Launch types are filters for pre/post-launch hooks.
+
+    Please use these variables instead of raw strings, in case their
+    values change.
+    """
+
+    # Local launch - application is launched on local machine
+    local = "local"
+    # Farm render job - application is on farm
+    farm_render = "farm-render"
+    # Farm publish job - integration post-render job
+    farm_publish = "farm-publish"
+    # Remote launch - application is launched on a remote machine from which
+    #   publishing can be started
+    remote = "remote"
+    # Automated launch - application is launched with automated publishing
+    automated = "automated"
+
+
+class ApplicationExecutable:
+    """Representation of executable loaded from settings."""
+
+    def __init__(self, executable):
+        # Try to format the executable with environment variables
+        try:
+            executable = executable.format(**os.environ)
+        except Exception:
+            pass
+
+        # On macOS check if the executable path exists when it ends
+        #   with `.app`
+        # - it is common that the path will lead to "/Applications/Blender"
+        #   but the real path is "/Applications/Blender.app"
+        if platform.system().lower() == "darwin":
+            executable = self.macos_executable_prep(executable)
+
+        self.executable_path = executable
+
+    def __str__(self):
+        return self.executable_path
+
+    def __repr__(self):
+        return "<{}> {}".format(self.__class__.__name__, self.executable_path)
+
+    @staticmethod
+    def macos_executable_prep(executable):
+        """Try to find full path to executable file.
+
+        Real executable is stored in '*.app/Contents/MacOS/'.
+
+        Having the path to '*.app' gives the ability to read its plist info
+        and use the "CFBundleExecutable" key from the plist to know what the
+        real executable is.
+
+        Plist is stored in '*.app/Contents/Info.plist'.
+
+        This is because some '*.app' directories don't have the same
+        permissions as the real executable.
+        """
+        # Check if there is a `.app` file
+        if not os.path.exists(executable):
+            _executable = executable + ".app"
+            if os.path.exists(_executable):
+                executable = _executable
+
+        # Try to find the real executable if there is a `Contents` subfolder
+        contents_dir = os.path.join(executable, "Contents")
+        if os.path.exists(contents_dir):
+            executable_filename = None
+            # Load plist file and check for bundle executable
+            plist_filepath = os.path.join(contents_dir, "Info.plist")
+            if os.path.exists(plist_filepath):
+                import plistlib
+
+                if hasattr(plistlib, "load"):
+                    with open(plist_filepath, "rb") as stream:
+                        parsed_plist = plistlib.load(stream)
+                else:
+                    parsed_plist = plistlib.readPlist(plist_filepath)
+                executable_filename = parsed_plist.get("CFBundleExecutable")
+
+            if executable_filename:
+                executable = os.path.join(
+                    contents_dir, "MacOS", executable_filename
+                )
+
+        return executable
+
+    def as_args(self):
+        return [self.executable_path]
+
+    def _realpath(self):
+        """Check if path is valid executable path."""
+        # Check for executable in PATH
+        result = find_executable(self.executable_path)
+        if result is not None:
+            return result
+
+        # This is not 100% validation but it is better than removing the
+        # ability to launch .bat, .sh or extensionless files
+        if os.path.exists(self.executable_path):
+            return self.executable_path
+        return None
+
+    def exists(self):
+        if not self.executable_path:
+            return False
+        return bool(self._realpath())
+
+
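# Illustrative sketch (editorial aside, not part of this patch): resolving an
# executable defined in settings. The path is hypothetical; on macOS
# 'macos_executable_prep' above would expand "/Applications/Blender" to the
# real binary inside the ".app" bundle.
from ayon_applications import ApplicationExecutable

executable = ApplicationExecutable("/Applications/Blender")
if executable.exists():
    # Arguments usable as the start of a subprocess argument list
    print(executable.as_args())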
+class UndefinedApplicationExecutable(ApplicationExecutable):
+    """Some applications do not require an executable path from settings.
+
+    In that case this class is used to "fake" an existing executable.
+    """
+    def __init__(self):
+        pass
+
+    def __str__(self):
+        return self.__class__.__name__
+
+    def __repr__(self):
+        return "<{}>".format(self.__class__.__name__)
+
+    def as_args(self):
+        return []
+
+    def exists(self):
+        return True
+
+
+class ApplicationGroup:
+    """Hold information about application group.
+
+    Application group wraps different versions (variants) of an application.
+    e.g. "maya" is a group and "maya_2020" is a variant.
+
+    Group holds `host_name`, which is the implementation name used in AYON.
+    It also holds `enabled`, telling whether the whole app group is enabled,
+    and `icon`, the application icon path in resources.
+
+    Group also has `environment`, which holds environments shared by all
+    variants.
+
+    Args:
+        name (str): Group name.
+        data (dict): Group defining data loaded from settings.
+        manager (ApplicationManager): Manager that created the group.
+    """
+
+    def __init__(self, name, data, manager):
+        self.name = name
+        self.manager = manager
+        self._data = data
+
+        self.enabled = data["enabled"]
+        self.label = data["label"] or None
+        self.icon = data["icon"] or None
+        env = {}
+        try:
+            env = json.loads(data["environment"])
+        except Exception:
+            pass
+        self._environment = env
+
+        host_name = data["host_name"] or None
+        self.is_host = host_name is not None
+        self.host_name = host_name
+
+        settings_variants = data["variants"]
+        variants = {}
+        for variant_data in settings_variants:
+            app_variant = Application(variant_data, self)
+            variants[app_variant.name] = app_variant
+
+        self.variants = variants
+
+    def __repr__(self):
+        return "<{}> - {}".format(self.__class__.__name__, self.name)
+
+    def __iter__(self):
+        for variant in self.variants.values():
+            yield variant
+
+    @property
+    def environment(self):
+        return copy.deepcopy(self._environment)
+
+
+class Application:
+    """Hold information about an application.
+
+    The object by itself does nothing special.
+
+    Args:
+        data (dict): Data for the version containing information about
+            executables, the variant label or whether it is enabled.
+            The only required key is `executables`.
+        group (ApplicationGroup): App group object that created the
+            application and under which the application belongs.
+
+    """
+    def __init__(self, data, group):
+        self._data = data
+        name = data["name"]
+        label = data["label"] or name
+        enabled = False
+        if group.enabled:
+            enabled = data.get("enabled", True)
+
+        if group.label:
+            full_label = " ".join((group.label, label))
+        else:
+            full_label = label
+        env = {}
+        try:
+            env = json.loads(data["environment"])
+        except Exception:
+            pass
+
+        arguments = data["arguments"]
+        if isinstance(arguments, dict):
+            arguments = arguments.get(platform.system().lower())
+
+        if not arguments:
+            arguments = []
+
+        _executables = data["executables"].get(platform.system().lower(), [])
+        executables = [
+            ApplicationExecutable(executable)
+            for executable in _executables
+        ]
+
+        self.group = group
+
+        self.name = name
+        self.label = label
+        self.enabled = enabled
+        self.use_python_2 = data.get("use_python_2", False)
+
+        self.full_name = "/".join((group.name, name))
+        self.full_label = full_label
+        self.arguments = arguments
+        self.executables = executables
+        self._environment = env
+
+    def __repr__(self):
+        return "<{}> - {}".format(self.__class__.__name__, self.full_name)
+
+    @property
+    def environment(self):
+        return copy.deepcopy(self._environment)
+
+    @property
+    def manager(self):
+        return self.group.manager
+
+    @property
+    def host_name(self):
+        return self.group.host_name
+
+    @property
+    def icon(self):
+        return self.group.icon
+
+    @property
+    def is_host(self):
+        return self.group.is_host
+
+    def find_executable(self):
+        """Try to find existing executable for application.
+
+        Returns (str): Path to executable from `executables` or None if none
+            exists.
+        """
+        for executable in self.executables:
+            if executable.exists():
+                return executable
+        return None
+
+    def launch(self, *args, **kwargs):
+        """Launch the application.
+
+        The manager's launch method is used for this purpose to keep the
+        logic in one place.
+
+        Arguments must match the manager's launch method, which is why *args
+        and **kwargs are used.
+
+        Returns:
+            subprocess.Popen: Executed process as a Popen object.
+        """
+        return self.manager.launch(self.full_name, *args, **kwargs)
+
+
+class EnvironmentToolGroup:
+    """Hold information about environment tool group.
+
+    An environment tool group may hold different variants of the same tool
+    and set environments that are shared by all of them.
+
+    e.g. "mtoa" may have different versions, but all environments except one
+    are the same.
+
+    Args:
+        data (dict): Group information with variants.
+        manager (ApplicationManager): Manager that creates the group.
+    """
+
+    def __init__(self, data, manager):
+        name = data["name"]
+        label = data["label"]
+
+        self.name = name
+        self.label = label
+        self._data = data
+        self.manager = manager
+
+        environment = {}
+        try:
+            environment = json.loads(data["environment"])
+        except Exception:
+            pass
+        self._environment = environment
+
+        variants = data.get("variants") or []
+        variants_by_name = {}
+        for variant_data in variants:
+            tool = EnvironmentTool(variant_data, self)
+            variants_by_name[tool.name] = tool
+        self.variants = variants_by_name
+
+    def __repr__(self):
+        return "<{}> - {}".format(self.__class__.__name__, self.name)
+
+    def __iter__(self):
+        for variant in self.variants.values():
+            yield variant
+
+    @property
+    def environment(self):
+        return copy.deepcopy(self._environment)
+
+
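# Illustrative sketch (editorial aside, not part of this patch): settings
# data consumed by 'EnvironmentToolGroup' above has roughly this shape. All
# values are hypothetical; "environment" entries are JSON strings, as the
# 'json.loads' calls expect.
example_tool_group_data = {
    "name": "mtoa",
    "label": "Arnold for Maya",
    "environment": "{\"MTOA_BASE\": \"/opt/mtoa\"}",
    "variants": [
        {
            "name": "5-3",
            "label": "5.3",
            "host_names": ["maya"],
            "app_variants": [],
            "environment": "{\"MTOA_VERSION\": \"5.3\"}",
        },
    ],
}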
+class EnvironmentTool:
+    """Hold information about an application tool.
+
+    Args:
+        variant_data (dict): Variant data with environments and
+            host and app variant filters.
+        group (EnvironmentToolGroup): Group which wraps the tool.
+    """
+
+    def __init__(self, variant_data, group):
+        # Backwards compatibility 3.9.1 - 3.9.2
+        # - 'variant_data' used to contain only environments but now contains
+        #   also host and application variant filters
+        name = variant_data["name"]
+        label = variant_data["label"]
+        host_names = variant_data["host_names"]
+        app_variants = variant_data["app_variants"]
+
+        environment = {}
+        try:
+            environment = json.loads(variant_data["environment"])
+        except Exception:
+            pass
+
+        self.host_names = host_names
+        self.app_variants = app_variants
+        self.name = name
+        self.variant_label = label
+        self.label = " ".join((group.label, label))
+        self.group = group
+
+        self._environment = environment
+        self.full_name = "/".join((group.name, name))
+
+    def __repr__(self):
+        return "<{}> - {}".format(self.__class__.__name__, self.full_name)
+
+    @property
+    def environment(self):
+        return copy.deepcopy(self._environment)
+
+    def is_valid_for_app(self, app):
+        """Is tool valid for application.
+
+        Args:
+            app (Application): Application for which environments are
+                prepared.
+        """
+        if self.app_variants and app.full_name not in self.app_variants:
+            return False
+
+        if self.host_names and app.host_name not in self.host_names:
+            return False
+        return True
diff --git a/server_addon/applications/client/ayon_applications/exceptions.py b/server_addon/applications/client/ayon_applications/exceptions.py
new file mode 100644
index 0000000000..d5a48d3b6b
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/exceptions.py
@@ -0,0 +1,50 @@
+class ApplicationNotFound(Exception):
+    """Application was not found in ApplicationManager by name."""
+
+    def __init__(self, app_name):
+        self.app_name = app_name
+        super(ApplicationNotFound, self).__init__(
+            "Application \"{}\" was not found.".format(app_name)
+        )
+
+
+class ApplicationExecutableNotFound(Exception):
+    """Defined executable paths are not available on the machine."""
+
+    def __init__(self, application):
+        self.application = application
+        details = None
+        if not application.executables:
+            msg = (
+                "Executable paths for application \"{}\"({}) are not set."
+            )
+        else:
+            msg = (
+                "Defined executable paths for application \"{}\"({})"
+                " are not available on this machine."
+            )
+            details = "Defined paths:"
+            for executable in application.executables:
+                details += "\n- " + executable.executable_path
+
+        self.msg = msg.format(application.full_label, application.full_name)
+        self.details = details
+
+        exc_msg = str(self.msg)
+        if details:
+            # Is it a good idea to pass a newline symbol to the exception
+            # message?
+            exc_msg += "\n" + details
+        self.exc_msg = exc_msg
+        super(ApplicationExecutableNotFound, self).__init__(exc_msg)
+
+
+class ApplicationLaunchFailed(Exception):
+    """Application launch failed due to a known reason.
+
+    Message should be self-explanatory, as a traceback won't be shown.
+    """
+    pass
+
+
+class MissingRequiredKey(KeyError):
+    pass
diff --git a/server_addon/applications/client/ayon_applications/hooks.py b/server_addon/applications/client/ayon_applications/hooks.py
new file mode 100644
index 0000000000..6aa12a210a
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/hooks.py
@@ -0,0 +1,150 @@
+import platform
+from abc import ABCMeta, abstractmethod
+
+import six
+
+from ayon_core.lib import Logger
+
+from .defs import LaunchTypes
+
+
+@six.add_metaclass(ABCMeta)
+class LaunchHook:
+    """Abstract base class of launch hook."""
+    # Order of prelaunch hook, will be executed as last if set to None.
+    order = None
+    # List of host implementations, skipped if empty.
+    hosts = set()
+    # Set of application groups
+    app_groups = set()
+    # Set of specific application names
+    app_names = set()
+    # Set of platform availability
+    platforms = set()
+    # Set of launch types for which the hook is available
+    # - if empty then it is available for all launch types
+    # - by default has 'local', which is the most common reason for launch
+    #   hooks
+    launch_types = {LaunchTypes.local}
+
+    def __init__(self, launch_context):
+        """Constructor of launch hook.
+
+        Should always be called.
+        """
+        self.log = Logger.get_logger(self.__class__.__name__)
+
+        self.launch_context = launch_context
+
+        is_valid = self.class_validation(launch_context)
+        if is_valid:
+            is_valid = self.validate()
+
+        self.is_valid = is_valid
+
+    @classmethod
+    def class_validation(cls, launch_context):
+        """Validation of class attributes by launch context.
+
+        Args:
+            launch_context (ApplicationLaunchContext): Context of launching
+                application.
+
+        Returns:
+            bool: Is launch hook valid for the context by class attributes.
+        """
+        if cls.platforms:
+            low_platforms = tuple(
+                _platform.lower()
+                for _platform in cls.platforms
+            )
+            if platform.system().lower() not in low_platforms:
+                return False
+
+        if cls.hosts:
+            if launch_context.host_name not in cls.hosts:
+                return False
+
+        if cls.app_groups:
+            if launch_context.app_group.name not in cls.app_groups:
+                return False
+
+        if cls.app_names:
+            if launch_context.app_name not in cls.app_names:
+                return False
+
+        if cls.launch_types:
+            if launch_context.launch_type not in cls.launch_types:
+                return False
+
+        return True
+
+    @property
+    def data(self):
+        return self.launch_context.data
+
+    @property
+    def application(self):
+        return getattr(self.launch_context, "application", None)
+
+    @property
+    def manager(self):
+        return getattr(self.application, "manager", None)
+
+    @property
+    def host_name(self):
+        return getattr(self.application, "host_name", None)
+
+    @property
+    def app_group(self):
+        return getattr(self.application, "group", None)
+
+    @property
+    def app_name(self):
+        return getattr(self.application, "full_name", None)
+
+    @property
+    def addons_manager(self):
+        return getattr(self.launch_context, "addons_manager", None)
+
+    @property
+    def modules_manager(self):
+        """
+        Deprecated:
+            Use 'addons_manager' instead.
+        """
+        return self.addons_manager
+
+    def validate(self):
+        """Optional validation of launch hook on initialization.
+
+        Returns:
+            bool: Hook is valid (True) or invalid (False).
+        """
+        # QUESTION Not sure if this method has any usable potential.
+        # - maybe result can be based on settings
+        return True
+
+    @abstractmethod
+    def execute(self, *args, **kwargs):
+        """Abstract execute method where the logic of the hook lives."""
+        pass
+
+
+class PreLaunchHook(LaunchHook):
+    """Abstract class of prelaunch hook.
+
+    This launch hook is processed before the application is launched.
+
+    If an exception happens during processing, the application won't be
+    launched.
+    """
+
+
+class PostLaunchHook(LaunchHook):
+    """Abstract class of postlaunch hook.
+
+    This launch hook is processed after the application is launched.
+
+    Exceptions raised during processing are ignored, and processing of other
+    postlaunch hooks won't stop either.
+    """
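# Illustrative sketch (editorial aside, not part of this patch): a concrete
# prelaunch hook might look like this. The class name and environment
# variable are hypothetical; 'class_validation' above would skip it for
# anything that is not a local launch of a "maya" group application.
from ayon_applications import PreLaunchHook, LaunchTypes


class AddDebugEnvHook(PreLaunchHook):
    app_groups = {"maya"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # 'launch_context.env' ends up in the subprocess.Popen kwargs.
        self.launch_context.env["MY_DEBUG_FLAG"] = "1"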
diff --git a/server_addon/applications/client/ayon_applications/manager.py b/server_addon/applications/client/ayon_applications/manager.py
new file mode 100644
index 0000000000..dca2ff4491
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/manager.py
@@ -0,0 +1,676 @@
+import os
+import sys
+import copy
+import json
+import tempfile
+import platform
+import inspect
+import subprocess
+
+import six
+
+from ayon_core import AYON_CORE_ROOT
+from ayon_core.settings import get_studio_settings
+from ayon_core.lib import (
+    Logger,
+    modules_from_path,
+    classes_from_module,
+    get_linux_launcher_args,
+)
+from ayon_core.addon import AddonsManager
+
+from .constants import DEFAULT_ENV_SUBGROUP
+from .exceptions import (
+    ApplicationNotFound,
+    ApplicationExecutableNotFound,
+)
+from .hooks import PostLaunchHook, PreLaunchHook
+from .defs import EnvironmentToolGroup, ApplicationGroup, LaunchTypes
+
+
+class ApplicationManager:
+    """Load applications and tools and store them by their full name.
+
+    Args:
+        studio_settings (dict): Preloaded studio settings. When passed, the
+            manager will always use these values. This gives the ability to
+            create a manager using different settings.
+    """
+
+    def __init__(self, studio_settings=None):
+        self.log = Logger.get_logger(self.__class__.__name__)
+
+        self.app_groups = {}
+        self.applications = {}
+        self.tool_groups = {}
+        self.tools = {}
+
+        self._studio_settings = studio_settings
+
+        self.refresh()
+
+    def set_studio_settings(self, studio_settings):
+        """Change the studio settings used by the manager.
+
+        This will trigger a refresh of the manager.
+        """
+        self._studio_settings = studio_settings
+
+        self.refresh()
+
+    def refresh(self):
+        """Refresh applications from settings."""
+        self.app_groups.clear()
+        self.applications.clear()
+        self.tool_groups.clear()
+        self.tools.clear()
+
+        if self._studio_settings is not None:
+            settings = copy.deepcopy(self._studio_settings)
+        else:
+            settings = get_studio_settings(
+                clear_metadata=False, exclude_locals=False
+            )
+
+        applications_addon_settings = settings["applications"]
+
+        # Prepare known applications
+        app_defs = applications_addon_settings["applications"]
+        additional_apps = app_defs.pop("additional_apps")
+        for additional_app in additional_apps:
+            app_name = additional_app.pop("name")
+            if app_name in app_defs:
+                self.log.warning((
+                    "Additional application '{}' is already"
+                    " in built-in applications."
+                ).format(app_name))
+            app_defs[app_name] = additional_app
+
+        for group_name, variant_defs in app_defs.items():
+            group = ApplicationGroup(group_name, variant_defs, self)
+            self.app_groups[group_name] = group
+            for app in group:
+                self.applications[app.full_name] = app
+
+        tools_definitions = applications_addon_settings["tool_groups"]
+        for tool_group_data in tools_definitions:
+            group = EnvironmentToolGroup(tool_group_data, self)
+            self.tool_groups[group.name] = group
+            for tool in group:
+                self.tools[tool.full_name] = tool
+
+    def find_latest_available_variant_for_group(self, group_name):
+        group = self.app_groups.get(group_name)
+        if group is None or not group.enabled:
+            return None
+
+        output = None
+        for _, variant in reversed(sorted(group.variants.items())):
+            executable = variant.find_executable()
+            if executable:
+                output = variant
+                break
+        return output
+
+    def create_launch_context(self, app_name, **data):
+        """Prepare launch context for application.
+
+        Args:
+            app_name (str): Name of application that should be launched.
+            **data (Any): Any additional data. Data may be used during
+                preparation to store objects usable in multiple places.
+
+        Returns:
+            ApplicationLaunchContext: Launch context for application.
+
+        Raises:
+            ApplicationNotFound: Application was not found by entered name.
+        """
+
+        app = self.applications.get(app_name)
+        if not app:
+            raise ApplicationNotFound(app_name)
+
+        executable = app.find_executable()
+
+        return ApplicationLaunchContext(
+            app, executable, **data
+        )
+
+    def launch_with_context(self, launch_context):
+        """Launch application using existing launch context.
+
+        Args:
+            launch_context (ApplicationLaunchContext): Prepared launch
+                context.
+        """
+
+        if not launch_context.executable:
+            raise ApplicationExecutableNotFound(launch_context.application)
+        return launch_context.launch()
+
+    def launch(self, app_name, **data):
+        """Launch procedure.
+
+        For a host application, data is expected to contain "project_name",
+        "folder_path" and "task_name".
+
+        Args:
+            app_name (str): Name of application that should be launched.
+            **data (dict): Any additional data. Data may be used during
+                preparation to store objects usable in multiple places.
+
+        Raises:
+            ApplicationNotFound: Application was not found by the entered
+                argument `app_name`.
+            ApplicationExecutableNotFound: Executables in application
+                definition were not found on this machine.
+            ApplicationLaunchFailed: Something important for application
+                launch failed. Exception should contain an explanation
+                message; a traceback should not be needed.
+        """
+
+        context = self.create_launch_context(app_name, **data)
+        return self.launch_with_context(context)
+
+
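# Illustrative sketch (editorial aside, not part of this patch): typical use
# of the manager defined above. The application name and context values are
# hypothetical; for host applications the data is expected to contain
# "project_name", "folder_path" and "task_name".
from ayon_applications import ApplicationManager

manager = ApplicationManager()
process = manager.launch(
    "maya/2024",
    project_name="myproject",
    folder_path="/assets/hero",
    task_name="modeling",
)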
+class ApplicationLaunchContext:
+    """Context of launching application.
+
+    The main purpose of the context is to prepare launch arguments and
+    keyword arguments for the new process. The most important part of the
+    keyword argument preparation are the environment variables.
+
+    During the whole process it is possible to use the `data` attribute to
+    store objects usable in multiple places.
+
+    Launch arguments are strings in a list. It is possible to "chain"
+    arguments when their order matters, by adding a nested list whose order
+    is fixed and should not change.
+    NOTE: This is a recommendation, not a requirement.
+    e.g.: `["nuke.exe", "--NukeX"]` -> In this case any part of the process
+    may insert an argument between `nuke.exe` and `--NukeX`. To keep them
+    together it is better to wrap them in another list:
+    `[["nuke.exe", "--NukeX"]]`.
+
+    Notes:
+        It is possible to use the launch context only to prepare environment
+        variables. In that case `executable` may be None, and the
+        'run_prelaunch_hooks' method can be used to run prelaunch hooks
+        which prepare them.
+
+    Args:
+        application (Application): Application definition.
+        executable (ApplicationExecutable): Object with path to executable.
+        env_group (Optional[str]): Environment variable group. If not set
+            'DEFAULT_ENV_SUBGROUP' is used.
+        launch_type (Optional[str]): Launch type. If not set 'local' is used.
+        **data (dict): Any additional data. Data may be used during
+            preparation to store objects usable in multiple places.
+    """
+
+    def __init__(
+        self,
+        application,
+        executable,
+        env_group=None,
+        launch_type=None,
+        **data
+    ):
+        # Application object
+        self.application = application
+
+        self.addons_manager = AddonsManager()
+
+        # Logger
+        logger_name = "{}-{}".format(self.__class__.__name__,
+                                     self.application.full_name)
+        self.log = Logger.get_logger(logger_name)
+
+        self.executable = executable
+
+        if launch_type is None:
+            launch_type = LaunchTypes.local
+        self.launch_type = launch_type
+
+        if env_group is None:
+            env_group = DEFAULT_ENV_SUBGROUP
+
+        self.env_group = env_group
+
+        self.data = dict(data)
+
+        launch_args = []
+        if executable is not None:
+            launch_args = executable.as_args()
+        # subprocess.Popen launch arguments (first argument in constructor)
+        self.launch_args = launch_args
+        self.launch_args.extend(application.arguments)
+        if self.data.get("app_args"):
+            self.launch_args.extend(self.data.pop("app_args"))
+
+        # Handle launch environments
+        src_env = self.data.pop("env", None)
+        if src_env is not None and not isinstance(src_env, dict):
+            self.log.warning((
+                "Passed `env` kwarg has invalid type: {}. Expected: `dict`."
+                " Using `os.environ` instead."
+            ).format(str(type(src_env))))
+            src_env = None
+
+        if src_env is None:
+            src_env = os.environ
+
+        ignored_env = {"QT_API", }
+        env = {
+            key: str(value)
+            for key, value in src_env.items()
+            if key not in ignored_env
+        }
+        # subprocess.Popen keyword arguments
+        self.kwargs = {"env": env}
+
+        if platform.system().lower() == "windows":
+            # Detach new process from currently running process on Windows
+            flags = (
+                subprocess.CREATE_NEW_PROCESS_GROUP
+                | subprocess.DETACHED_PROCESS
+            )
+            self.kwargs["creationflags"] = flags
+
+        if not sys.stdout:
+            self.kwargs["stdout"] = subprocess.DEVNULL
+            self.kwargs["stderr"] = subprocess.DEVNULL
+
+        self.prelaunch_hooks = None
+        self.postlaunch_hooks = None
+
+        self.process = None
+        self._prelaunch_hooks_executed = False
+
+    @property
+    def env(self):
+        if (
+            "env" not in self.kwargs
+            or self.kwargs["env"] is None
+        ):
+            self.kwargs["env"] = {}
+        return self.kwargs["env"]
+
+    @env.setter
+    def env(self, value):
+        if not isinstance(value, dict):
+            raise ValueError(
+                "'env' attribute expects a 'dict' object. Got: {}".format(
+                    str(type(value))
+                )
+            )
+        self.kwargs["env"] = value
+
+    @property
+    def modules_manager(self):
+        """
+        Deprecated:
+            Use 'addons_manager' instead.
+
+        """
+        return self.addons_manager
+
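    # Illustrative sketch (editorial aside, not part of this patch): an addon
    # opts into launch hooks by implementing 'get_launch_hook_paths' (with or
    # without the application argument) on its addon class, e.g.
    # ("MY_ADDON_ROOT" is hypothetical):
    #
    #     def get_launch_hook_paths(self, application):
    #         return [os.path.join(MY_ADDON_ROOT, "launch_hooks")]
    #
    # Such paths are gathered by '_collect_addons_launch_hook_paths' below.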
+        """
+
+        expected_types = (list, tuple, set)
+
+        output = []
+        for module in self.addons_manager.get_enabled_addons():
+            # Skip addon if it does not implement 'get_launch_hook_paths'
+            func = getattr(module, "get_launch_hook_paths", None)
+            if func is None:
+                continue
+
+            func = module.get_launch_hook_paths
+            if hasattr(inspect, "signature"):
+                sig = inspect.signature(func)
+                expect_args = len(sig.parameters) > 0
+            else:
+                expect_args = len(inspect.getargspec(func)[0]) > 0
+
+            # Pass application argument if method expects it
+            try:
+                if expect_args:
+                    hook_paths = func(self.application)
+                else:
+                    hook_paths = func()
+            except Exception:
+                self.log.warning(
+                    "Failed to call 'get_launch_hook_paths'",
+                    exc_info=True
+                )
+                continue
+
+            if not hook_paths:
+                continue
+
+            # Convert string to list
+            if isinstance(hook_paths, six.string_types):
+                hook_paths = [hook_paths]
+
+            # Skip invalid types
+            if not isinstance(hook_paths, expected_types):
+                self.log.warning((
+                    "Result of `get_launch_hook_paths`"
+                    " has invalid type {}. Expected {}"
+                ).format(type(hook_paths), expected_types))
+                continue
+
+            output.extend(hook_paths)
+        return output
+
+    def paths_to_launch_hooks(self):
+        """Directory paths where to look for launch hooks."""
+        # This method has potential to be part of application manager (maybe)
+        paths = []
+
+        # TODO load additional studio paths from settings
+        global_hooks_dir = os.path.join(AYON_CORE_ROOT, "hooks")
+
+        hooks_dirs = [
+            global_hooks_dir
+        ]
+        if self.host_name:
+            # If the host is implemented as an addon, its launch hooks are
+            #   collected using '_collect_addons_launch_hook_paths'
+            #   - the addon has to implement 'get_launch_hook_paths'
+            host_module = self.addons_manager.get_host_addon(self.host_name)
+            if not host_module:
+                hooks_dirs.append(os.path.join(
+                    AYON_CORE_ROOT, "hosts", self.host_name, "hooks"
+                ))
+
+        for path in hooks_dirs:
+            if (
+                os.path.exists(path)
+                and os.path.isdir(path)
+                and path not in paths
+            ):
+                paths.append(path)
+
+        # Load addons paths
+        paths.extend(self._collect_addons_launch_hook_paths())
+
+        return paths
+
+    def discover_launch_hooks(self, force=False):
+        """Load and prepare launch hooks."""
+        if (
+            self.prelaunch_hooks is not None
+            or self.postlaunch_hooks is not None
+        ):
+            if not force:
+                self.log.info("Launch hooks were already discovered.")
+                return
+
+            self.prelaunch_hooks.clear()
+            self.postlaunch_hooks.clear()
+
+        self.log.debug("Discovery of launch hooks started.")
+
+        paths = self.paths_to_launch_hooks()
+        self.log.debug("Paths searched for launch hooks:\n{}".format(
+            "\n".join("- {}".format(path) for path in paths)
+        ))
+
+        all_classes = {
+            "pre": [],
+            "post": []
+        }
+        for path in paths:
+            if not os.path.exists(path):
+                self.log.info(
+                    "Path to launch hooks does not exist: \"{}\"".format(path)
+                )
+                continue
+
+            modules, _crashed = modules_from_path(path)
+            for _filepath, module in modules:
+                all_classes["pre"].extend(
+                    classes_from_module(PreLaunchHook, module)
+                )
+                all_classes["post"].extend(
+                    classes_from_module(PostLaunchHook, module)
+                )
+
+        for launch_type, classes in all_classes.items():
+            hooks_with_order = []
+            hooks_without_order = []
+            for klass in classes:
+                try:
+                    hook = klass(self)
+                    if not hook.is_valid:
+                        self.log.debug(
+                            "Skipped hook invalid for current launch context: "
+                            "{}".format(klass.__name__)
+                        )
+                        continue
+
+                    if inspect.isabstract(hook):
+                        self.log.debug("Skipped abstract hook: {}".format(
+                            klass.__name__
+                        ))
+                        continue
+
+                    # Separate hooks by pre/post class
+                    if hook.order is None:
+                        hooks_without_order.append(hook)
+                    else:
+                        hooks_with_order.append(hook)
+
+                except Exception:
+                    self.log.warning(
+                        "Initialization of hook failed: "
+                        "{}".format(klass.__name__),
+                        exc_info=True
+                    )
+
+            # Sort hooks with order by order
+            ordered_hooks = list(sorted(
+                hooks_with_order, key=lambda obj: obj.order
+            ))
+            # Extend ordered hooks with hooks without defined order
+            ordered_hooks.extend(hooks_without_order)
+
+            if launch_type == "pre":
+                self.prelaunch_hooks = ordered_hooks
+            else:
+                self.postlaunch_hooks = ordered_hooks
+
+        self.log.debug("Found {} prelaunch and {} postlaunch hooks.".format(
+            len(self.prelaunch_hooks), len(self.postlaunch_hooks)
+        ))
+
+    @property
+    def app_name(self):
+        return self.application.name
+
+    @property
+    def host_name(self):
+        return self.application.host_name
+
+    @property
+    def app_group(self):
+        return self.application.group
+
+    @property
+    def manager(self):
+        return self.application.manager
+
+    def _run_process(self):
+        # Windows and MacOS have easier process start
+        low_platform = platform.system().lower()
+        if low_platform in ("windows", "darwin"):
+            return subprocess.Popen(self.launch_args, **self.kwargs)
+
+        # Linux uses mid process
+        # - it is possible that the mid process executable is not
+        #   available for this version of AYON; in that case use standard
+        #   launch
+        launch_args = get_linux_launcher_args()
+        if launch_args is None:
+            return subprocess.Popen(self.launch_args, **self.kwargs)
+
+        # Prepare data that will be passed to midprocess
+        # - store arguments to a json and pass path to json as last argument
+        # - pass environments to set
+        app_env = self.kwargs.pop("env", {})
+        json_data = {
+            "args": self.launch_args,
+            "env": app_env
+        }
+        if app_env:
+            # Filter environments of subprocess
+            self.kwargs["env"] = {
+                key: value
+                for key, value in os.environ.items()
+                if key in app_env
+            }
+
+        # Create temp file
+        json_temp = tempfile.NamedTemporaryFile(
+            mode="w", prefix="op_app_args", suffix=".json", delete=False
+        )
+        json_temp.close()
+        json_temp_filepath = json_temp.name
+        with open(json_temp_filepath, "w") as stream:
+            json.dump(json_data, stream)
+
+        launch_args.append(json_temp_filepath)
+
+        # Create mid-process which will launch application
+        process = subprocess.Popen(launch_args, **self.kwargs)
+        # Wait until the process finishes
+        # - This is important! The process would stay in "open" state.
+        process.wait()
+        # Remove the temp file
+        os.remove(json_temp_filepath)
+        # Return process which is already terminated
+        return process
+
+    def run_prelaunch_hooks(self):
+        """Run prelaunch hooks.
+
+        This method will be executed only once; any future calls will skip
+        the processing.
+        """
+
+        if self._prelaunch_hooks_executed:
+            self.log.warning("Prelaunch hooks were already executed.")
+            return
+        # Discover launch hooks
+        self.discover_launch_hooks()
+
+        # Execute prelaunch hooks
+        for prelaunch_hook in self.prelaunch_hooks:
+            self.log.debug("Executing prelaunch hook: {}".format(
+                str(prelaunch_hook.__class__.__name__)
+            ))
+            prelaunch_hook.execute()
+        self._prelaunch_hooks_executed = True
+
+    def launch(self):
+        """Collect data for new process and then create it.
+
+        This method must not be executed more than once.
+
+        Returns:
+            subprocess.Popen: Created process as Popen object.
+        """
+        if self.process is not None:
+            self.log.warning("Application was already launched.")
+            return
+
+        if not self._prelaunch_hooks_executed:
+            self.run_prelaunch_hooks()
+
+        self.log.debug("All prelaunch hooks executed. Starting new process.")
+
+        # Prepare subprocess args
+        args_len_str = ""
+        if isinstance(self.launch_args, str):
+            args = self.launch_args
+        else:
+            args = self.clear_launch_args(self.launch_args)
+            args_len_str = " ({})".format(len(args))
+        self.log.info(
+            "Launching \"{}\" with args{}: {}".format(
+                self.application.full_name, args_len_str, args
+            )
+        )
+        self.launch_args = args
+
+        # Run process
+        self.process = self._run_process()
+
+        # Process post launch hooks
+        for postlaunch_hook in self.postlaunch_hooks:
+            self.log.debug("Executing postlaunch hook: {}".format(
+                str(postlaunch_hook.__class__.__name__)
+            ))
+
+            # TODO how to handle errors?
+            # - store them to a variable to keep them accessible?
+            try:
+                postlaunch_hook.execute()
+
+            except Exception:
+                self.log.warning(
+                    "After launch procedures were not successful.",
+                    exc_info=True
+                )
+
+        self.log.debug("Launch of {} finished.".format(
+            self.application.full_name
+        ))
+
+        return self.process
+
+    @staticmethod
+    def clear_launch_args(args):
+        """Collect launch arguments to final order.
+
+        Launch arguments should be a list that may contain nested lists.
+        This function will unpack the inner lists and keep the ordering.
+
+        ```
+        # source
+        [ [ arg1, [ arg2, arg3 ] ], arg4, [arg5, arg6]]
+        # result
+        [ arg1, arg2, arg3, arg4, arg5, arg6]
+        ```
+
+        Args:
+            args (list): Source arguments in list may contain inner lists.
+
+        Returns:
+            list: Unpacked arguments.
+        """
+        if isinstance(args, str):
+            return args
+        all_cleared = False
+        while not all_cleared:
+            all_cleared = True
+            new_args = []
+            for arg in args:
+                if isinstance(arg, (list, tuple, set)):
+                    all_cleared = False
+                    for _arg in arg:
+                        new_args.append(_arg)
+                else:
+                    new_args.append(arg)
+            args = new_args
+
+        return args
+
diff --git a/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py b/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py
new file mode 100644
index 0000000000..f54a551cda
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/plugins/publish/collect_app_name.py
@@ -0,0 +1,48 @@
+"""
+Run after global plugin 'CollectHostName' in ayon_core.
+
+Requires:
+    None
+
+Provides:
+    context -> hostName (str)
+    context -> appName (str)
+    context -> appLabel (str)
+"""
+import os
+import pyblish.api
+
+from ayon_applications import ApplicationManager
+
+
+class CollectAppName(pyblish.api.ContextPlugin):
+    """Collect host name, app name and app label to context."""
+
+    label = "Collect App Name"
+    order = pyblish.api.CollectorOrder - 0.499999
+
+    def process(self, context):
+        host_name = context.data.get("hostName")
+        app_name = context.data.get("appName")
+        app_label = context.data.get("appLabel")
+        # Don't override values if they are already set
+        if host_name and app_name and app_label:
+            return
+
+        # Use AYON_APP_NAME to get full app name
+        if not app_name:
+            app_name = os.environ.get("AYON_APP_NAME")
+
+        # Fill missing values based on app full name
+        if (not host_name or not app_label) and app_name:
+            app_manager = ApplicationManager()
+            app = app_manager.applications.get(app_name)
+            if app:
+                if not host_name:
+                    host_name = app.host_name
+                if not app_label:
+                    app_label = app.full_label
+
+        context.data["hostName"] = host_name
+        context.data["appName"] = app_name
+        context.data["appLabel"] = app_label
diff --git a/server_addon/applications/client/ayon_applications/utils.py b/server_addon/applications/client/ayon_applications/utils.py
new file mode 100644
index 0000000000..234fa6c683
--- /dev/null
+++ b/server_addon/applications/client/ayon_applications/utils.py
@@ -0,0 +1,609 @@
+import os
+import copy
+import json
+import platform
+import collections
+
+import six
+import acre
+
+from ayon_core import AYON_CORE_ROOT
+from ayon_core.settings import get_project_settings
+from ayon_core.lib import Logger, get_ayon_username
+from ayon_core.addon import AddonsManager
+from ayon_core.pipeline import HOST_WORKFILE_EXTENSIONS
+from ayon_core.pipeline.template_data import get_template_data
+from ayon_core.pipeline.workfile import (
+    get_workfile_template_key,
+    get_workdir_with_workdir_data,
+    get_last_workfile,
+    should_use_last_workfile_on_launch,
+    should_open_workfiles_tool_on_launch,
+)
+
+from .constants import PLATFORM_NAMES, DEFAULT_ENV_SUBGROUP
+from .exceptions import MissingRequiredKey, ApplicationLaunchFailed
+from .manager import ApplicationManager
+
+
+def parse_environments(env_data, env_group=None, platform_name=None):
+    """Parse environment values from settings by group and platform.
+
+    Data may contain up to 2 hierarchical levels of dictionaries. At the end
+    of the last level must be a string or a list. Lists are joined using the
+    platform specific joiner (';' for windows and ':' for linux and mac).
+
+    Hierarchical levels can contain keys for subgroups and platform name.
+    Platform specific values must always be the last level of the dictionary.
+    Platform names are "windows" (MS Windows), "linux" (any Linux
+    distribution) and "darwin" (any macOS distribution).
+
+    Subgroups are helpers added mainly for standard and on-farm usage. Farm
+    may require different environments for e.g. licence related values or
+    plugins. Default subgroup is "standard".
+
+    Examples:
+        ```
+        {
+            # Unchanged value
+            "ENV_KEY1": "value",
+            # Empty values are kept (unset environment variable)
+            "ENV_KEY2": "",
+
+            # Join list values with ':' or ';'
+            "ENV_KEY3": ["value1", "value2"],
+
+            # Environment groups
+            "ENV_KEY4": {
+                "standard": "DEMO_SERVER_URL",
+                "farm": "LICENCE_SERVER_URL"
+            },
+
+            # Platform specific (and only for windows and mac)
+            "ENV_KEY5": {
+                "windows": "windows value",
+                "darwin": ["value 1", "value 2"]
+            },
+
+            # Environment groups and platform combination
+            "ENV_KEY6": {
+                "farm": "FARM_VALUE",
+                "standard": {
+                    "windows": ["value1", "value2"],
+                    "linux": "value1",
+                    "darwin": ""
+                }
+            }
+        }
+        ```
+    """
+    output = {}
+    if not env_data:
+        return output
+
+    if not env_group:
+        env_group = DEFAULT_ENV_SUBGROUP
+
+    if not platform_name:
+        platform_name = platform.system().lower()
+
+    for key, value in env_data.items():
+        if isinstance(value, dict):
+            # Look if any key is platform key
+            # - expect that it represents environment group if it does not
+            #   contain platform keys
+            if not PLATFORM_NAMES.intersection(set(value.keys())):
+                # Skip the key if group is not available
+                if env_group not in value:
+                    continue
+                value = value[env_group]
+
+        # Check again if value is dictionary
+        # - this time there should be only platform keys
+        if isinstance(value, dict):
+            value = value.get(platform_name)
+
+        # Check if value is list and join its values
+        # QUESTION Should empty values be skipped?
+        if isinstance(value, (list, tuple)):
+            value = os.pathsep.join(value)
+
+        # Set key to output if value is string
+        if isinstance(value, six.string_types):
+            output[key] = value
+    return output
+
+
+class EnvironmentPrepData(dict):
+    """Helper dictionary for storing temp data during environment prep.
+
+    Args:
+        data (dict): Data must contain required keys.
+    """
+    required_keys = (
+        "project_entity", "folder_entity", "task_entity", "app", "anatomy"
+    )
+
+    def __init__(self, data):
+        for key in self.required_keys:
+            if key not in data:
+                raise MissingRequiredKey(key)
+
+        if not data.get("log"):
+            data["log"] = Logger.get_logger("EnvironmentPrepData")
+
+        if data.get("env") is None:
+            data["env"] = os.environ.copy()
+
+        project_name = data["project_entity"]["name"]
+        if "project_settings" not in data:
+            data["project_settings"] = get_project_settings(project_name)
+
+        super(EnvironmentPrepData, self).__init__(data)
+
+
+def get_app_environments_for_context(
+    project_name,
+    folder_path,
+    task_name,
+    app_name,
+    env_group=None,
+    launch_type=None,
+    env=None,
+    addons_manager=None
+):
+    """Prepare environment variables by context.
+
+    Args:
+        project_name (str): Name of project.
+        folder_path (str): Folder path.
+        task_name (str): Name of task.
+        app_name (str): Name of application that is launched and can be found
+            by ApplicationManager.
+        env_group (Optional[str]): Name of environment group. If not passed
+            default group is used.
+        launch_type (Optional[str]): Type for which prelaunch hooks are
+            executed.
+        env (Optional[dict[str, str]]): Initial environment variables.
+            `os.environ` is used when not passed.
+        addons_manager (Optional[AddonsManager]): Initialized addons
+            manager.
+
+    Returns:
+        dict: Environments for passed context and application.
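+
+    Example:
+        A minimal usage sketch; the context values below are made up for
+        illustration:
+
+        ```
+        env = get_app_environments_for_context(
+            "myProject", "/assets/characters/hero", "modeling", "maya/2024"
+        )
+        ```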
+    """
+
+    # Prepare app object which can be obtained only from ApplicationManager
+    app_manager = ApplicationManager()
+    context = app_manager.create_launch_context(
+        app_name,
+        project_name=project_name,
+        folder_path=folder_path,
+        task_name=task_name,
+        env_group=env_group,
+        launch_type=launch_type,
+        env=env,
+        addons_manager=addons_manager,
+        modules_manager=addons_manager,
+    )
+    context.run_prelaunch_hooks()
+    return context.env
+
+
+def _merge_env(env, current_env):
+    """Modified 'merge' function from the 'acre' module."""
+    result = current_env.copy()
+    for key, value in env.items():
+        # Keep missing keys by not filling `missing` kwarg
+        value = acre.lib.partial_format(value, data=current_env)
+        result[key] = value
+    return result
+
+
+def _add_python_version_paths(app, env, logger, addons_manager):
+    """Add vendor packages specific for a Python version."""
+
+    for addon in addons_manager.get_enabled_addons():
+        addon.modify_application_launch_arguments(app, env)
+
+    # Skip adding if host name is not set
+    if not app.host_name:
+        return
+
+    # Add Python 2/3 modules
+    python_vendor_dir = os.path.join(
+        AYON_CORE_ROOT,
+        "vendor",
+        "python"
+    )
+    if app.use_python_2:
+        pythonpath = os.path.join(python_vendor_dir, "python_2")
+    else:
+        pythonpath = os.path.join(python_vendor_dir, "python_3")
+
+    if not os.path.exists(pythonpath):
+        return
+
+    logger.debug("Adding Python version specific paths to PYTHONPATH")
+    python_paths = [pythonpath]
+
+    # Load PYTHONPATH from current launch context
+    python_path = env.get("PYTHONPATH")
+    if python_path:
+        python_paths.append(python_path)
+
+    # Set new PYTHONPATH to launch context environments
+    env["PYTHONPATH"] = os.pathsep.join(python_paths)
+
+
+def prepare_app_environments(
+    data, env_group=None, implementation_envs=True, addons_manager=None
+):
+    """Modify launch environments based on launched app and context.
+
+    Args:
+        data (EnvironmentPrepData): Dictionary where result and intermediate
+            result will be stored.
+
+    """
+    app = data["app"]
+    log = data["log"]
+    source_env = data["env"].copy()
+
+    if addons_manager is None:
+        addons_manager = AddonsManager()
+
+    _add_python_version_paths(app, source_env, log, addons_manager)
+
+    # Use environments from local settings
+    filtered_local_envs = {}
+    # NOTE Overrides for environment variables are not implemented in AYON.
+    # project_settings = data["project_settings"]
+    # whitelist_envs = project_settings["general"].get("local_env_white_list")
+    # if whitelist_envs:
+    #     local_settings = get_local_settings()
+    #     local_envs = local_settings.get("environments") or {}
+    #     filtered_local_envs = {
+    #         key: value
+    #         for key, value in local_envs.items()
+    #         if key in whitelist_envs
+    #     }
+
+    # Apply local environment variables for already existing values
+    for key, value in filtered_local_envs.items():
+        if key in source_env:
+            source_env[key] = value
+
+    # `app_and_tool_labels` is used only for debug logging
+    app_and_tool_labels = [app.full_name]
+    # Environments for application
+    environments = [
+        app.group.environment,
+        app.environment
+    ]
+
+    folder_entity = data.get("folder_entity")
+    # Add tools environments
+    groups_by_name = {}
+    tool_by_group_name = collections.defaultdict(dict)
+    if folder_entity:
+        # Make sure each tool group can be added only once
+        for key in folder_entity["attrib"].get("tools") or []:
+            tool = app.manager.tools.get(key)
+            if not tool or not tool.is_valid_for_app(app):
+                continue
+            groups_by_name[tool.group.name] = tool.group
+            tool_by_group_name[tool.group.name][tool.name] = tool
+
+    for group_name in sorted(groups_by_name.keys()):
+        group = groups_by_name[group_name]
+        environments.append(group.environment)
+        for tool_name in sorted(tool_by_group_name[group_name].keys()):
+            tool = tool_by_group_name[group_name][tool_name]
+            environments.append(tool.environment)
+            app_and_tool_labels.append(tool.full_name)
+
+    log.debug(
+        "Will add environments for apps and tools: {}".format(
+            ", ".join(app_and_tool_labels)
+        )
+    )
+
+    env_values = {}
+    for _env_values in environments:
+        if not _env_values:
+            continue
+
+        # Choose right platform
+        tool_env = parse_environments(_env_values, env_group)
+
+        # Apply local environment variables
+        # - must happen between all values because they may be used during
+        #   merge
+        for key, value in filtered_local_envs.items():
+            if key in tool_env:
+                tool_env[key] = value
+
+        # Merge dictionaries
+        env_values = _merge_env(tool_env, env_values)
+
+    merged_env = _merge_env(env_values, source_env)
+
+    loaded_env = acre.compute(merged_env, cleanup=False)
+
+    final_env = None
+    # Add host specific environments
+    if app.host_name and implementation_envs:
+        host_addon = addons_manager.get_host_addon(app.host_name)
+        add_implementation_envs = None
+        if host_addon:
+            add_implementation_envs = getattr(
+                host_addon, "add_implementation_envs", None
+            )
+        if add_implementation_envs:
+            # Function may only modify passed dict without returning value
+            final_env = add_implementation_envs(loaded_env, app)
+
+    if final_env is None:
+        final_env = loaded_env
+
+    keys_to_remove = set(source_env.keys()) - set(final_env.keys())
+
+    # Update env
+    data["env"].update(final_env)
+    for key in keys_to_remove:
+        data["env"].pop(key, None)
+
+
+def apply_project_environments_value(
+    project_name, env, project_settings=None, env_group=None
+):
+    """Apply project specific environments on passed environments.
+
+    The environments are applied on the passed `env` value, so it is not
+    required to apply the changes back.
+
+    Args:
+        project_name (str): Name of project for which environments should be
+            received.
+        env (dict): Environment values on which project specific environments
+            will be applied.
+        project_settings (Optional[dict]): Project settings for passed
+            project name. Optional if project settings are already prepared.
+        env_group (Optional[str]): Name of environment group.
+
+    Returns:
+        dict: Passed env values with applied project environments.
+
+    Raises:
+        KeyError: If project settings do not contain keys for project specific
+            environments.
+
+    """
+    if project_settings is None:
+        project_settings = get_project_settings(project_name)
+
+    env_value = project_settings["core"]["project_environments"]
+    if env_value:
+        env_value = json.loads(env_value)
+        parsed_value = parse_environments(env_value, env_group)
+        env.update(acre.compute(
+            _merge_env(parsed_value, env),
+            cleanup=False
+        ))
+    return env
+
+
+def prepare_context_environments(data, env_group=None, addons_manager=None):
+    """Modify launch environments with context data for launched host.
+
+    Args:
+        data (EnvironmentPrepData): Dictionary where result and intermediate
+            result will be stored.
+
+    """
+    # Context environments
+    log = data["log"]
+
+    project_entity = data["project_entity"]
+    folder_entity = data["folder_entity"]
+    task_entity = data["task_entity"]
+    if not project_entity:
+        log.info(
+            "Skipping context environments preparation."
+            " Launch context does not contain required data."
+        )
+        return
+
+    # Load project specific environments
+    project_name = project_entity["name"]
+    project_settings = get_project_settings(project_name)
+    data["project_settings"] = project_settings
+
+    app = data["app"]
+    context_env = {
+        "AYON_PROJECT_NAME": project_entity["name"],
+        "AYON_APP_NAME": app.full_name
+    }
+    if folder_entity:
+        folder_path = folder_entity["path"]
+        context_env["AYON_FOLDER_PATH"] = folder_path
+
+        if task_entity:
+            context_env["AYON_TASK_NAME"] = task_entity["name"]
+
+    log.debug(
+        "Context environments set:\n{}".format(
+            json.dumps(context_env, indent=4)
+        )
+    )
+    data["env"].update(context_env)
+
+    # Apply project specific environments on current env value
+    # - apply them once the context environments are set
+    apply_project_environments_value(
+        project_name, data["env"], project_settings, env_group
+    )
+
+    if not app.is_host:
+        return
+
+    data["env"]["AYON_HOST_NAME"] = app.host_name
+
+    if not folder_entity or not task_entity:
+        # QUESTION replace with log.info and skip workfile discovery?
+        # - technically it should be possible to launch host without context
+        raise ApplicationLaunchFailed(
+            "Host launch requires folder and task context."
+        )
+
+    workdir_data = get_template_data(
+        project_entity,
+        folder_entity,
+        task_entity,
+        app.host_name,
+        project_settings
+    )
+    data["workdir_data"] = workdir_data
+
+    anatomy = data["anatomy"]
+
+    task_type = workdir_data["task"]["type"]
+    # Temp solution how to pass task type to `_prepare_last_workfile`
+    data["task_type"] = task_type
+
+    try:
+        workdir = get_workdir_with_workdir_data(
+            workdir_data,
+            anatomy.project_name,
+            anatomy,
+            project_settings=project_settings
+        )
+
+    except Exception as exc:
+        raise ApplicationLaunchFailed(
+            "Error in anatomy.format: {}".format(str(exc))
+        )
+
+    if not os.path.exists(workdir):
+        log.debug(
+            "Creating workdir folder: \"{}\"".format(workdir)
+        )
+        try:
+            os.makedirs(workdir)
+        except Exception as exc:
+            raise ApplicationLaunchFailed(
+                "Couldn't create workdir because: {}".format(str(exc))
+            )
+
+    data["env"]["AYON_WORKDIR"] = workdir
+
+    _prepare_last_workfile(data, workdir, addons_manager)
+
+
+def _prepare_last_workfile(data, workdir, addons_manager):
+    """Last workfile workflow preparation.
+
+    The function checks whether the last workfile workflow should be used
+    and tries to find the last workfile. Both pieces of information are
+    stored to `data` and to environments.
+
+    The last workfile path is always filled (with version 1) even if no
+    workfile exists yet.
+
+    Args:
+        data (EnvironmentPrepData): Dictionary where result and intermediate
+            result will be stored.
+        workdir (str): Path to folder where workfiles should be stored.
+
+    """
+    if not addons_manager:
+        addons_manager = AddonsManager()
+
+    log = data["log"]
+
+    _workdir_data = data.get("workdir_data")
+    if not _workdir_data:
+        log.info(
+            "Skipping last workfile preparation."
+            " Key `workdir_data` not filled."
+        )
+        return
+
+    app = data["app"]
+    workdir_data = copy.deepcopy(_workdir_data)
+    project_name = data["project_name"]
+    task_name = data["task_name"]
+    task_type = data["task_type"]
+
+    start_last_workfile = data.get("start_last_workfile")
+    if start_last_workfile is None:
+        start_last_workfile = should_use_last_workfile_on_launch(
+            project_name, app.host_name, task_name, task_type
+        )
+    else:
+        log.info("Opening of last workfile was disabled by user")
+
+    data["start_last_workfile"] = start_last_workfile
+
+    workfile_startup = should_open_workfiles_tool_on_launch(
+        project_name, app.host_name, task_name, task_type
+    )
+    data["workfile_startup"] = workfile_startup
+
+    # Store boolean as "0"(False) or "1"(True)
+    data["env"]["AVALON_OPEN_LAST_WORKFILE"] = (
+        str(int(bool(start_last_workfile)))
+    )
+    data["env"]["AYON_WORKFILE_TOOL_ON_START"] = (
+        str(int(bool(workfile_startup)))
+    )
+
+    _sub_msg = "" if start_last_workfile else " not"
+    log.debug(
+        "Last workfile should{} be opened on start.".format(_sub_msg)
+    )
+
+    # Last workfile path
+    last_workfile_path = data.get("last_workfile_path") or ""
+    if not last_workfile_path:
+        host_addon = addons_manager.get_host_addon(app.host_name)
+        if host_addon:
+            extensions = host_addon.get_workfile_extensions()
+        else:
+            extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
+
+        if extensions:
+            anatomy = data["anatomy"]
+            project_settings = data["project_settings"]
+            task_type = workdir_data["task"]["type"]
+            template_key = get_workfile_template_key(
+                project_name,
+                task_type,
+                app.host_name,
+                project_settings=project_settings
+            )
+            # Find last workfile
+            file_template = anatomy.get_template_item(
+                "work", template_key, "file"
+            ).template
+
+            workdir_data.update({
+                "version": 1,
+                "user": get_ayon_username(),
+                "ext": extensions[0]
+            })
+
+            last_workfile_path = get_last_workfile(
+                workdir, file_template, workdir_data, extensions, True
+            )
+
+    if not os.path.exists(last_workfile_path):
+        log.debug((
+            "Workfile for launch context does not exist"
+            " yet but the path will be set."
+        ))
+    log.debug(
+        "Setting last workfile path: {}".format(last_workfile_path)
+    )
+
+    data["env"]["AYON_LAST_WORKFILE"] = last_workfile_path
+    data["last_workfile_path"] = last_workfile_path
diff --git a/server_addon/applications/package.py b/server_addon/applications/package.py
new file mode 100644
index 0000000000..ce312ed662
--- /dev/null
+++ b/server_addon/applications/package.py
@@ -0,0 +1,3 @@
+name = "applications"
+title = "Applications"
+version = "0.2.0"
diff --git a/server_addon/applications/server/__init__.py b/server_addon/applications/server/__init__.py
index d5c2de3df3..d85678b77b 100644
--- a/server_addon/applications/server/__init__.py
+++ b/server_addon/applications/server/__init__.py
@@ -3,9 +3,9 @@ import json
 import copy
 
 from ayon_server.addons import BaseServerAddon, AddonLibrary
+from ayon_server.entities.core import attribute_library
 from ayon_server.lib.postgres import Postgres
 
-from .version import __version__
 from .settings import ApplicationsAddonSettings, DEFAULT_VALUES
 
 try:
@@ -86,9 +86,6 @@ def get_enum_items_from_groups(groups):
 
 
 class ApplicationsAddon(BaseServerAddon):
-    name = "applications"
-    title = "Applications"
-    version = __version__
     settings_model = ApplicationsAddonSettings
 
     async def get_default_settings(self):
@@ -118,9 +115,28 @@ class ApplicationsAddon(BaseServerAddon):
         )
 
     async def setup(self):
-        need_restart = await self.create_applications_attribute()
+        need_restart = await self.create_required_attributes()
         if need_restart:
             self.request_server_restart()
+        await self._update_enums()
+
+    def _get_applications_def(self):
+        return {
+            "name": "applications",
+            "type": "list_of_strings",
+            "title": "Applications",
+            "scope": ["project"],
+            "enum": [],
+        }
+
+    def _get_tools_def(self):
+        return {
+            "name": "tools",
+            "type": "list_of_strings",
+            "title": "Tools",
+            "scope": ["project", "folder", "task"],
+            "enum": [],
+        }
 
@@ -129,6 +145,73 @@
     async def create_applications_attribute(self) -> bool:
         """Make sure there are required attributes which ftrack addon needs.
 
        Returns:
            bool: 'True' if an attribute was created or updated.

        """
+        need_restart = await self.create_required_attributes()
+        await self._update_enums()
+        return need_restart
+
+    async def create_required_attributes(self) -> bool:
+        """Make sure there are required 'applications' and 'tools' attributes.
+
+        This only checks for the existence of the attributes; it does not
+        populate them with any data. When an attribute is added, the server
+        needs to be restarted, while adding enum data to an attribute does
+        not require a restart.
+
+        Returns:
+            bool: 'True' if an attribute was created or updated.
+        """
+
+        # Definitions of the attributes that are expected to exist
+        apps_attribute_data = self._get_applications_def()
+        tools_attribute_data = self._get_tools_def()
+
+        apps_attrib_name = apps_attribute_data["name"]
+        tools_attrib_name = tools_attribute_data["name"]
+
+        async with Postgres.acquire() as conn, conn.transaction():
+            query = "SELECT BOOL_OR(name = 'applications') AS has_applications, BOOL_OR(name = 'tools') AS has_tools FROM attributes;"
+            result = (await conn.fetch(query))[0]
+
+            attributes_to_create = {}
+            if not result["has_applications"]:
+                attributes_to_create[apps_attrib_name] = {
+                    "scope": apps_attribute_data["scope"],
+                    "data": {
+                        "title": apps_attribute_data["title"],
+                        "type": apps_attribute_data["type"],
+                        "enum": [],
+                    }
+                }
+
+            if not result["has_tools"]:
+                attributes_to_create[tools_attrib_name] = {
+                    "scope": tools_attribute_data["scope"],
+                    "data": {
+                        "title": tools_attribute_data["title"],
+                        "type": tools_attribute_data["type"],
+                        "enum": [],
+                    },
+                }
+
+            needs_restart = False
+            # When any of the required attributes are not present, add them
+            #   and return 'True' to indicate that the server needs to be
+            #   restarted
+            for name, payload in attributes_to_create.items():
+                insert_query = "INSERT INTO attributes (name, scope, data, position) VALUES ($1, $2, $3, (SELECT COALESCE(MAX(position), 0) + 1 FROM attributes)) ON CONFLICT DO NOTHING"
+                await conn.execute(
+                    insert_query,
+                    name,
+                    payload["scope"],
+                    payload["data"],
+                )
+                needs_restart = True
+
+        return needs_restart
+
+    async def _update_enums(self):
+        """Updates applications and tools enums based on the addon settings.
+
+        This method is called when the addon is started (after we are sure
+        that the 'applications' and 'tools' attributes exist) and when the
+        addon settings are updated (using the 'on_settings_changed' method).
+ """ + instance = AddonLibrary.getinstance() app_defs = instance.data.get(self.name) all_applications = [] @@ -148,33 +231,32 @@ class ApplicationsAddon(BaseServerAddon): merge_groups(all_applications, app_groups) merge_groups(all_tools, studio_settings["tool_groups"]) - query = "SELECT name, position, scope, data from public.attributes" - apps_attrib_name = "applications" tools_attrib_name = "tools" apps_enum = get_enum_items_from_groups(all_applications) tools_enum = get_enum_items_from_groups(all_tools) + apps_attribute_data = { "type": "list_of_strings", "title": "Applications", - "enum": apps_enum + "enum": apps_enum, } tools_attribute_data = { "type": "list_of_strings", "title": "Tools", - "enum": tools_enum + "enum": tools_enum, } + apps_scope = ["project"] tools_scope = ["project", "folder", "task"] - apps_match_position = None apps_matches = False - tools_match_position = None tools_matches = False - position = 1 - async for row in Postgres.iterate(query): - position += 1 + + async for row in Postgres.iterate( + "SELECT name, position, scope, data from public.attributes" + ): if row["name"] == apps_attrib_name: # Check if scope is matching ftrack addon requirements if ( @@ -182,7 +264,6 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == apps_enum ): apps_matches = True - apps_match_position = row["position"] elif row["name"] == tools_attrib_name: if ( @@ -190,45 +271,41 @@ class ApplicationsAddon(BaseServerAddon): and row["data"].get("enum") == tools_enum ): tools_matches = True - tools_match_position = row["position"] if apps_matches and tools_matches: - return False + return - postgre_query = "\n".join(( - "INSERT INTO public.attributes", - " (name, position, scope, data)", - "VALUES", - " ($1, $2, $3, $4)", - "ON CONFLICT (name)", - "DO UPDATE SET", - " scope = $3,", - " data = $4", - )) if not apps_matches: - # Reuse position from found attribute - if apps_match_position is None: - apps_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - apps_attrib_name, - apps_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, apps_scope, apps_attribute_data, + apps_attrib_name, ) if not tools_matches: - if tools_match_position is None: - tools_match_position = position - position += 1 - await Postgres.execute( - postgre_query, - tools_attrib_name, - tools_match_position, + """ + UPDATE attributes SET + scope = $1, + data = $2 + WHERE + name = $3 + """, tools_scope, tools_attribute_data, + tools_attrib_name, ) - return True + + # Reset attributes cache on server + await attribute_library.load() + + async def on_settings_changed(self, *args, **kwargs): + _ = args, kwargs + await self._update_enums() diff --git a/server_addon/applications/server/applications.json b/server_addon/applications/server/applications.json index b72d117225..e4b72fdff9 100644 --- a/server_addon/applications/server/applications.json +++ b/server_addon/applications/server/applications.json @@ -7,6 +7,26 @@ "host_name": "maya", "environment": "{\n \"MAYA_DISABLE_CLIC_IPM\": \"Yes\",\n \"MAYA_DISABLE_CIP\": \"Yes\",\n \"MAYA_DISABLE_CER\": \"Yes\",\n \"PYMEL_SKIP_MEL_INIT\": \"Yes\",\n \"LC_ALL\": \"C\"\n}\n", "variants": [ + { + "name": "2025", + "label": "2025", + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\Maya2025\\bin\\maya.exe" + ], + "darwin": ["/Applications/Autodesk/maya2025/Maya.app"], + "linux": [ + "/usr/autodesk/maya2025/bin/maya" + ] + }, + "arguments": { + "windows": [], + 
"darwin": [], + "linux": [] + }, + "environment": "{\n \"MAYA_VERSION\": \"2025\"\n}", + "use_python_2": false + }, { "name": "2024", "label": "2024", @@ -1225,6 +1245,32 @@ } ] }, + "zbrush": { + "enabled": true, + "label": "Zbrush", + "icon": "{}/app_icons/zbrush.png", + "host_name": "zbrush", + "environment": "{\n \"ZBRUSH_PLUGIN_PATH\": [\n \"{ZBRUSH_PLUGIN_PATH}\",\n \"{OPENPYPE_STUDIO_PLUGINS}/zbrush/api/zscripts\"\n ]\n}", + "variants": [ + { + "name": "2024", + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Maxon ZBrush 2024\\ZBrush.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": "{}" + } + ] + }, "additional_apps": [] } } diff --git a/server_addon/applications/server/settings.py b/server_addon/applications/server/settings.py index a49175d488..5743e9f471 100644 --- a/server_addon/applications/server/settings.py +++ b/server_addon/applications/server/settings.py @@ -188,6 +188,8 @@ class ApplicationsSettings(BaseSettingsModel): default_factory=AppGroupWithPython, title="Wrap") openrv: AppGroup = SettingsField( default_factory=AppGroupWithPython, title="OpenRV") + zbrush: AppGroup = SettingsField( + default_factory=AppGroupWithPython, title="Zbrush") additional_apps: list[AdditionalAppGroup] = SettingsField( default_factory=list, title="Additional Applications") diff --git a/server_addon/applications/server/tools.json b/server_addon/applications/server/tools.json index 54bee11cf7..3d8f400200 100644 --- a/server_addon/applications/server/tools.json +++ b/server_addon/applications/server/tools.json @@ -1,55 +1,149 @@ { "tool_groups": [ { - "environment": "{\n \"MTOA\": \"{STUDIO_SOFTWARE}/arnold/mtoa_{MAYA_VERSION}_{MTOA_VERSION}\",\n \"MAYA_RENDER_DESC_PATH\": \"{MTOA}\",\n \"MAYA_MODULE_PATH\": \"{MTOA}\",\n \"ARNOLD_PLUGIN_PATH\": \"{MTOA}/shaders\",\n \"MTOA_EXTENSIONS_PATH\": {\n \"darwin\": \"{MTOA}/extensions\",\n \"linux\": \"{MTOA}/extensions\",\n \"windows\": \"{MTOA}/extensions\"\n },\n \"MTOA_EXTENSIONS\": {\n \"darwin\": \"{MTOA}/extensions\",\n \"linux\": \"{MTOA}/extensions\",\n \"windows\": \"{MTOA}/extensions\"\n },\n \"DYLD_LIBRARY_PATH\": {\n \"darwin\": \"{MTOA}/bin\"\n },\n \"PATH\": {\n \"windows\": \"{PATH};{MTOA}/bin\"\n }\n}", - "name": "mtoa", - "label": "Autodesk Arnold", + "name": "htoa", + "label": "Arnold for Houdini (example)", "variants": [ { + "name": "5-4-2-7", + "label": "", + "host_names": [ + "houdini" + ], + "environment": "{\n \"HTOA_VERSION\": \"5.4.2.7\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. 
Can be defined in Core addon globally\",\n\n \"HOUDINI_PATH\": [\n \"{STUDIO_SW}/APP/HTOA/{HTOA_VERSION}/HOUDINI{HOUDINI_VERSION}/WINDOWS/htoa-6.1.3.3_rdb15014_houdini-{HTOA_VERSION}\",\n \"{HOUDINI_PATH}\"\n ],\n \"PATH\": {\n \"windows\": [\n \"{STUDIO_SW}/APP/HTOA/{HTOA_VERSION}/HOUDINI{HOUDINI_VERSION}/WINDOWS/htoa-6.1.3.3_rdb15014_houdini-{HTOA_VERSION}/scripts/bin\",\n \"{PATH}\"\n ]\n }\n}" + }, + { + "name": "mtoa", + "label": "Arnold for Maya (example)", + "variants": [ + { + "name": "5-3-1-0", + "label": "", "host_names": [], - "app_variants": [], - "environment": "{\n \"MTOA_VERSION\": \"3.2\"\n}", - "name": "3-2", - "label": "3.2" + "environment": "{\n \"MTOA_VERSION\": \"5.3.1.0\"\n}", + "app_variants": [] }, { + "name": "5-3-4-1", + "label": "", "host_names": [], - "app_variants": [], - "environment": "{\n \"MTOA_VERSION\": \"3.1\"\n}", - "name": "3-1", - "label": "3.1" + "environment": "{\n \"MTOA_VERSION\": \"5.3.4.1\"\n}", + "app_variants": [] } - ] + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. Can be defined in Core addon globally\",\n\n \"MTOA\": {\n \"darwin\": \"{STUDIO_SW}/APP/MTOA/{MTOA_VERSION}/MAYA{MAYA_VERSION}/MAC\",\n \"linux\": \"{STUDIO_SW}/APP/MTOA/{MTOA_VERSION}/MAYA{MAYA_VERSION}/LINUX\",\n \"windows\": \"{STUDIO_SW}/APP/MTOA/{MTOA_VERSION}/MAYA{MAYA_VERSION}/WINDOWS\"\n },\n \"MAYA_MODULE_PATH\": [\n \"{STUDIO_SW}/APP/MTOA\",\n \"{MAYA_MODULE_PATH}\"\n ],\n \"DYLD_LIBRARY_PATH\": {\n \"darwin\": \"{MTOA}/bin\"\n },\n \"PATH\": {\n \"windows\": [\n \"{MTOA}/bin\",\n \"{PATH}\"\n ]\n },\n \"XBMLANGPATH\": [\n \"{MTOA}/icons\",\n \"{XBMLANGPATH}\"\n ],\n \"MAYA_RENDER_DESC_PATH\": [\n \"{MTOA}\",\n \"{MAYA_RENDER_DESC_PATH}\"\n ],\n \"MTOA_STARTUP_LOG_VERBOSITY\": \"3\"\n}" }, { - "environment": "{}", - "name": "vray", - "label": "Chaos Group Vray", - "variants": [] - }, - { - "environment": "{}", - "name": "yeti", - "label": "Peregrine Labs Yeti", - "variants": [] - }, - { - "environment": "{}", - "name": "renderman", - "label": "Pixar Renderman", + "name": "redshiftMaya", + "label": "Redshift for Maya (example)", "variants": [ { + "name": "3-5-23", + "label": "", + "host_names": [], + "environment": "{\n \"REDSHIFT_VERSION\": \"3.5.23\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. 
Can be defined in Core addon globally\",\n\n \"REDSHIFT_COREDATAPATH\": {\n \"darwin\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/MAC\",\n \"linux\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/LINUX\",\n \"windows\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/WINDOWS\"\n },\n \"REDSHIFT_ABORTONLICENSEFAIL\": \"0\",\n \"MAYA_MODULE_PATH\": [\n \"{STUDIO_SW}/APP/REDSHIFT\",\n \"{MAYA_MODULE_PATH}\"\n ],\n \"MAYA_PLUG_IN_PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/{MAYA_VERSION}/nt-x86-64\",\n \"{MAYA_PLUG_IN_PATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/{MAYA_VERSION}\",\n \"{MAYA_PLUG_IN_PATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/{MAYA_VERSION}\",\n \"{MAYA_PLUG_IN_PATH}\"\n ]\n },\n \"MAYA_SCRIPT_PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/Common/scripts\",\n \"{MAYA_SCRIPT_PATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/scripts\",\n \"{MAYA_SCRIPT_PATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/scripts\",\n \"{MAYA_SCRIPT_PATH}\"\n ]\n },\n \"REDSHIFT_PROCEDURALSPATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ]\n },\n \"REDSHIFT_MAYAEXTENSIONSPATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/{MAYA_VERSION}/nt-x86-64/extensions\",\n \"{REDSHIFT_MAYAEXTENSIONSPATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/{MAYA_VERSION}/extensions\",\n \"{REDSHIFT_MAYAEXTENSIONSPATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/{MAYA_VERSION}/extensions\",\n \"{REDSHIFT_MAYAEXTENSIONSPATH}\"\n ]\n },\n \"XBMLANGPATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/Common/icons\",\n \"{XBMLANGPATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/icons\",\n \"{XBMLANGPATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/icons\",\n \"{XBMLANGPATH}\"\n ]\n },\n \"MAYA_RENDER_DESC_PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/Common/rendererDesc\",\n \"{MAYA_RENDER_DESC_PATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/rendererDesc\",\n \"{MAYA_RENDER_DESC_PATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/rendererDesc\",\n \"{MAYA_RENDER_DESC_PATH}\"\n ]\n },\n \"MAYA_CUSTOM_TEMPLATE_PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Plugins/Maya/Common/scripts/NETemplates\",\n \"{MAYA_CUSTOM_TEMPLATE_PATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/scripts/NETemplates\",\n \"{MAYA_CUSTOM_TEMPLATE_PATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/redshift4maya/common/scripts/NETemplates\",\n \"{MAYA_CUSTOM_TEMPLATE_PATH}\"\n ]\n },\n \"PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/bin\",\n \"{PATH}\"\n ]\n }\n}" + }, + { + "name": "redshift3dsmax", + "label": "Redshift for 3dsmax (example)", + "variants": [ + { + "name": "3-5-19", + "label": "", + "host_names": [ + "max" + ], + "environment": "{\n \"REDSHIFT_VERSION\": \"3.5.19\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. 
Can be defined in Core addon globally\",\n\n \"REDSHIFT_COREDATAPATH\": {\n \"darwin\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/MAC\",\n \"linux\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/LINUX\",\n \"windows\": \"{STUDIO_SW}/APP/REDSHIFT/{REDSHIFT_VERSION}/WINDOWS\"\n },\n \"REDSHIFT_ABORTONLICENSEFAIL\": \"0\",\n \"REDSHIFT_PROCEDURALSPATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/Procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ],\n \"linux\": [\n \"{REDSHIFT_COREDATAPATH}/procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ],\n \"darwin\": [\n \"{REDSHIFT_COREDATAPATH}/procedurals\",\n \"{REDSHIFT_PROCEDURALSPATH}\"\n ]\n },\n \"PATH\": {\n \"windows\": [\n \"{REDSHIFT_COREDATAPATH}/bin\",\n \"{PATH}\"\n ]\n }\n}" + }, + { + "name": "rendermanMaya", + "label": "Renderman for Maya (example)", + "variants": [ + { + "name": "24-3-maya", + "label": "24.3 RFM", "host_names": [ "maya" ], + "environment": "{\n \"RFMTREE\": {\n \"windows\": \"C:\\\\Program Files\\\\Pixar\\\\RenderManForMaya-24.3\",\n \"darwin\": \"/Applications/Pixar/RenderManForMaya-24.3\",\n \"linux\": \"/opt/pixar/RenderManForMaya-24.3\"\n },\n \"RMANTREE\": {\n \"windows\": \"C:\\\\Program Files\\\\Pixar\\\\RenderManProServer-24.3\",\n \"darwin\": \"/Applications/Pixar/RenderManProServer-24.3\",\n \"linux\": \"/opt/pixar/RenderManProServer-24.3\"\n }\n}", "app_variants": [ "maya/2022" - ], - "environment": "{\n \"RFMTREE\": {\n \"windows\": \"C:\\\\Program Files\\\\Pixar\\\\RenderManForMaya-24.3\",\n \"darwin\": \"/Applications/Pixar/RenderManForMaya-24.3\",\n \"linux\": \"/opt/pixar/RenderManForMaya-24.3\"\n },\n \"RMANTREE\": {\n \"windows\": \"C:\\\\Program Files\\\\Pixar\\\\RenderManProServer-24.3\",\n \"darwin\": \"/Applications/Pixar/RenderManProServer-24.3\",\n \"linux\": \"/opt/pixar/RenderManProServer-24.3\"\n }\n}", - "name": "24-3-maya", - "label": "24.3 RFM" + ] } - ] + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. Can be defined in Core addon globally\",\n\n \"RFMTREE\": {\n \"darwin\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/MAC/MAYA\",\n \"linux\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/LINUX/MAYA\",\n \"windows\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/WINDOWS/MAYA\"\n },\n \"RMANTREE\": {\n \"darwin\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/MAC/RenderManProServer-{RM_VERSION}\",\n \"linux\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/LINUX/RenderManProServer-{RM_VERSION}\",\n \"windows\": \"{STUDIO_SW}/APP/RENDERMAN/{RM_VERSION}/WINDOWS/RenderManProServer-{RM_VERSION}\"\n },\n \"MAYA_MODULE_PATH\": [\n \"{STUDIO_SW}/APP/RENDERMAN\",\n \"{MAYA_MODULE_PATH}\"\n ],\n \"PIXAR_LICENSE_FILE\": \"{STUDIO_SW}/APP/RENDERMAN/pixar.license\",\n \"RFM_DO_NOT_CREATE_MODULE_FILE\": \"1\"\n}" + }, + { + "name": "mGear", + "label": "mGear for Maya (example)", + "variants": [ + { + "name": "4-0-7", + "label": "", + "host_names": [], + "environment": "{\n \"MGEAR_VERSION\": \"4.0.7\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. 
Can be defined in Core addon globally\",\n\n \"MGEAR_ROOT\": \"{STUDIO_SW}/APP/MGEAR/{MGEAR_VERSION}/MAYA{MAYA_VERSION}/windows/x64\",\n \"MAYA_MODULE_PATH\": [\n \"{STUDIO_SW}/APP/MGEAR/{MGEAR_VERSION}/release\",\n \"{MAYA_MODULE_PATH}\"\n ]\n}" + }, + { + "name": "yetiMaya", + "label": "Yeti for Maya (example)", + "variants": [ + { + "name": "4.2.11", + "label": "", + "host_names": [], + "environment": "{\n \"YETI_VERSION\": \"4.2.11\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. Can be defined in Core addon globally\",\n\n \"YETI_HOME\": {\n \"darwin\": \"{STUDIO_SW}/APP/YETI/{YETI_VERSION}/MAYA{MAYA_VERSION}/MAC\",\n \"linux\": \"{STUDIO_SW}/APP/YETI/{YETI_VERSION}/MAYA{MAYA_VERSION}/LINUX\",\n \"windows\": \"{STUDIO_SW}/APP/YETI/{YETI_VERSION}/MAYA{MAYA_VERSION}/WINDOWS\"\n },\n \"YETI_TMP\": {\n \"windows\": \"C:/temp\",\n \"darwin\": \"/tmp\",\n \"linux\": \"/tmp\"\n },\n \"peregrinel_LICENSE\": \"4202@35.158.197.250\",\n \"MAYA_MODULE_PATH\": [\n \"{STUDIO_SW}/APP/YETI\",\n \"{MAYA_MODULE_PATH}\"\n ]\n}" + }, + { + "name": "vrayMaya", + "label": "Vray for Maya (example)", + "variants": [ + { + "name": "6.10.01", + "label": "", + "host_names": [ + "" + ], + "environment": "{\n \"VRAY_VERSION\": \"6.10.01\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. Can be defined in Core addon globally\",\n\n \"MAYA_MODULE_PATH\": {\n \"windows\": [\n \"{STUDIO_SW}/APP/VRAY/{VRAY_VERSION}/MAYA{MAYA_VERSION}/WINDOWS/maya_root/modules\",\n \"{MAYA_MODULE_PATH}\"\n ],\n \"linux\": [\n \"{STUDIO_SW}/APP/VRAY/{VRAY_VERSION}/MAYA{MAYA_VERSION}/LINUX/maya_root/modules\",\n \"{MAYA_MODULE_PATH}\"\n ],\n \"darwin\": [\n \"{STUDIO_SW}/APP/VRAY/{VRAY_VERSION}/MAYA{MAYA_VERSION}/MAC/maya_root/modules\",\n \"{MAYA_MODULE_PATH}\"\n ]\n },\n \"VRAY_AUTH_CLIENT_FILE_PATH\": \"{STUDIO_SW}/APP/VRAY\"\n}" + }, + { + "name": "vraynuke", + "label": "Vray for Nuke (example)", + "variants": [ + { + "name": "5-20-00", + "label": "", + "host_names": [ + "nuke" + ], + "environment": "{\n \"VRAYNUKE_VERSION\": \"5.20.00\"\n}", + "app_variants": [] + } + ], + "environment": "{\n \"_comment_\": \"{STUDIO_SW} points to software repository. 
Can be defined in Core addon globally\",\n\n \"VRAY_FOR_NUKE_13_0_PLUGINS\": {\n \"windows\": \"{STUDIO_SW}/APP/VRAYNUKE/{VRAYNUKE_VERSION}/NUKE{NUKE_VRAY_VERSION}/WINDOWS/nuke_vray/plugins/vray\"\n },\n \"NUKE_PATH\": {\n \"windows\": [\n \"{STUDIO_SW}/APP/VRAYNUKE/{VRAYNUKE_VERSION}/NUKE{NUKE_VRAY_VERSION}/WINDOWS/nuke_root\",\n \"{NUKE_PATH}\"\n ]\n },\n \"PATH\": {\n \"windows\": [\n \"{STUDIO_SW}/APP/VRAYNUKE/{VRAYNUKE_VERSION}/NUKE{NUKE_VRAY_VERSION}/WINDOWS/nuke_vray\",\n \"{PATH}\"\n ]\n },\n \"VRAY_AUTH_CLIENT_FILE_PATH\": \"{STUDIO_SW}/APP/VRAY\"\n}" } ] -} +} \ No newline at end of file diff --git a/server_addon/applications/server/version.py b/server_addon/applications/server/version.py deleted file mode 100644 index 0a8da88258..0000000000 --- a/server_addon/applications/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.6" diff --git a/server_addon/blender/package.py b/server_addon/blender/package.py new file mode 100644 index 0000000000..667076e533 --- /dev/null +++ b/server_addon/blender/package.py @@ -0,0 +1,3 @@ +name = "blender" +title = "Blender" +version = "0.1.8" diff --git a/server_addon/blender/server/__init__.py b/server_addon/blender/server/__init__.py index a7d6cb4400..b274e3bc29 100644 --- a/server_addon/blender/server/__init__.py +++ b/server_addon/blender/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import BlenderSettings, DEFAULT_VALUES class BlenderAddon(BaseServerAddon): - name = "blender" - title = "Blender" - version = __version__ settings_model: Type[BlenderSettings] = BlenderSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/blender/server/settings/main.py b/server_addon/blender/server/settings/main.py index aed9b5632d..3cca22ae3b 100644 --- a/server_addon/blender/server/settings/main.py +++ b/server_addon/blender/server/settings/main.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( from .imageio import BlenderImageIOModel from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_BLENDER_PUBLISH_SETTINGS ) from .render_settings import ( @@ -47,8 +47,8 @@ class BlenderSettings(BaseSettingsModel): default_factory=TemplateWorkfileBaseOptions, title="Workfile Builder" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins" ) diff --git a/server_addon/blender/server/settings/publish_plugins.py b/server_addon/blender/server/settings/publish_plugins.py index c2a989dd55..e998d7b057 100644 --- a/server_addon/blender/server/settings/publish_plugins.py +++ b/server_addon/blender/server/settings/publish_plugins.py @@ -44,6 +44,14 @@ class ExtractBlendModel(BaseSettingsModel): default_factory=list, title="Families" ) + compress: bool = SettingsField(True, title="Compress") + + +class ExtractBlendAnimationModel(BaseSettingsModel): + enabled: bool = SettingsField(True) + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + compress: bool = SettingsField(False, title="Compress") class ExtractPlayblastModel(BaseSettingsModel): @@ -51,13 +59,14 @@ class ExtractPlayblastModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") presets: str = SettingsField("", 
title="Presets", widget="textarea") + compress: bool = SettingsField(False, title="Compress") @validator("presets") def validate_json(cls, value): return validate_json_dict(value) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): ValidateCameraZeroKeyframe: ValidatePluginModel = SettingsField( default_factory=ValidatePluginModel, title="Validate Camera Zero Keyframe", @@ -80,6 +89,10 @@ class PublishPuginsModel(BaseSettingsModel): default_factory=ValidatePluginModel, title="Validate Mesh No Negative Scale" ) + ValidateModelMeshUvMap1: ValidatePluginModel = SettingsField( + default_factory=ValidatePluginModel, + title="Validate Model Mesh Has UV map named map1" + ) ValidateTransformZero: ValidatePluginModel = SettingsField( default_factory=ValidatePluginModel, title="Validate Transform Zero" @@ -110,8 +123,8 @@ class PublishPuginsModel(BaseSettingsModel): default_factory=ValidatePluginModel, title="Extract ABC" ) - ExtractBlendAnimation: ValidatePluginModel = SettingsField( - default_factory=ValidatePluginModel, + ExtractBlendAnimation: ExtractBlendAnimationModel = SettingsField( + default_factory=ExtractBlendAnimationModel, title="Extract Blend Animation" ) ExtractAnimationFBX: ValidatePluginModel = SettingsField( @@ -172,6 +185,11 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = { "optional": False, "active": True }, + "ValidateModelMeshUvMap1": { + "enabled": False, + "optional": True, + "active": True + }, "ValidateTransformZero": { "enabled": False, "optional": True, @@ -198,7 +216,8 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = { "action", "layout", "blendScene" - ] + ], + "compress": False }, "ExtractFBX": { "enabled": False, @@ -213,7 +232,8 @@ DEFAULT_BLENDER_PUBLISH_SETTINGS = { "ExtractBlendAnimation": { "enabled": True, "optional": True, - "active": True + "active": True, + "compress": False }, "ExtractAnimationFBX": { "enabled": False, diff --git a/server_addon/blender/server/version.py b/server_addon/blender/server/version.py deleted file mode 100644 index 0a8da88258..0000000000 --- a/server_addon/blender/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.6" diff --git a/server_addon/celaction/package.py b/server_addon/celaction/package.py new file mode 100644 index 0000000000..2b11a8630f --- /dev/null +++ b/server_addon/celaction/package.py @@ -0,0 +1,3 @@ +name = "celaction" +title = "CelAction" +version = "0.1.0" diff --git a/server_addon/celaction/server/__init__.py b/server_addon/celaction/server/__init__.py index 90d3dbaa01..e3769a4b7f 100644 --- a/server_addon/celaction/server/__init__.py +++ b/server_addon/celaction/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import CelActionSettings, DEFAULT_VALUES class CelActionAddon(BaseServerAddon): - name = "celaction" - title = "CelAction" - version = __version__ settings_model: Type[CelActionSettings] = CelActionSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/celaction/server/settings.py b/server_addon/celaction/server/settings.py index 9208948a07..afa9773477 100644 --- a/server_addon/celaction/server/settings.py +++ b/server_addon/celaction/server/settings.py @@ -42,7 +42,7 @@ class WorkfileModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectRenderPath: CollectRenderPathModel = SettingsField( 
default_factory=CollectRenderPathModel, title="Collect Render Path" @@ -57,8 +57,8 @@ class CelActionSettings(BaseSettingsModel): workfile: WorkfileModel = SettingsField( title="Workfile" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins", ) diff --git a/server_addon/celaction/server/version.py b/server_addon/celaction/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/celaction/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/clockify/package.py b/server_addon/clockify/package.py new file mode 100644 index 0000000000..bcf9425b3f --- /dev/null +++ b/server_addon/clockify/package.py @@ -0,0 +1,3 @@ +name = "clockify" +title = "Clockify" +version = "0.1.1" diff --git a/server_addon/clockify/server/__init__.py b/server_addon/clockify/server/__init__.py index 0fa453fdf4..11bbfed261 100644 --- a/server_addon/clockify/server/__init__.py +++ b/server_addon/clockify/server/__init__.py @@ -2,14 +2,8 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import ClockifySettings class ClockifyAddon(BaseServerAddon): - name = "clockify" - title = "Clockify" - version = __version__ settings_model: Type[ClockifySettings] = ClockifySettings - frontend_scopes = {} - services = {} diff --git a/server_addon/clockify/server/version.py b/server_addon/clockify/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/clockify/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git a/server_addon/create_ayon_addons.py b/server_addon/create_ayon_addons.py index 9553980f5d..f0a36d4740 100644 --- a/server_addon/create_ayon_addons.py +++ b/server_addon/create_ayon_addons.py @@ -1,10 +1,11 @@ import os import sys import re -import json import shutil import argparse import zipfile +import types +import importlib.machinery import platform import collections from pathlib import Path @@ -45,6 +46,11 @@ version = "{addon_version}" plugin_for = ["ayon_server"] """ +CLIENT_VERSION_CONTENT = '''# -*- coding: utf-8 -*- +"""Package declaring AYON core addon version.""" +__version__ = "{}" +''' + class ZipFileLongPaths(zipfile.ZipFile): """Allows longer paths in zip files. 
@@ -176,13 +182,71 @@ def create_addon_zip( shutil.rmtree(str(output_dir / addon_name)) +def prepare_client_code( + addon_dir: Path, + addon_output_dir: Path, + addon_version: str +): + client_dir = addon_dir / "client" + if not client_dir.exists(): + return + + # Prepare private dir in output + private_dir = addon_output_dir / "private" + private_dir.mkdir(parents=True, exist_ok=True) + + # Copy pyproject toml if available + pyproject_toml = client_dir / "pyproject.toml" + if pyproject_toml.exists(): + shutil.copy(pyproject_toml, private_dir) + + for subpath in client_dir.iterdir(): + if subpath.name == "pyproject.toml": + continue + + if subpath.is_file(): + continue + + # Update version.py with server version if 'version.py' is available + version_path = subpath / "version.py" + if version_path.exists(): + with open(version_path, "w") as stream: + stream.write(CLIENT_VERSION_CONTENT.format(addon_version)) + + zip_filepath = private_dir / "client.zip" + with ZipFileLongPaths(zip_filepath, "w", zipfile.ZIP_DEFLATED) as zipf: + # Add client code content to zip + for path, sub_path in find_files_in_subdir(str(subpath)): + sub_path = os.path.join(subpath.name, sub_path) + zipf.write(path, sub_path) + + +def import_filepath(path: Path, module_name: Optional[str] = None): + if not module_name: + module_name = os.path.splitext(path.name)[0] + + # Convert to string + path = str(path) + module = types.ModuleType(module_name) + module.__file__ = path + + # Use loader so module has full specs + module_loader = importlib.machinery.SourceFileLoader( + module_name, path + ) + module_loader.exec_module(module) + return module + + def create_addon_package( addon_dir: Path, output_dir: Path, create_zip: bool, keep_source: bool, ): - addon_version = get_addon_version(addon_dir) + src_package_py = addon_dir / "package.py" + package = import_filepath(src_package_py) + addon_version = package.version addon_output_dir = output_dir / addon_dir.name / addon_version if addon_output_dir.exists(): @@ -190,22 +254,16 @@ def create_addon_package( addon_output_dir.mkdir(parents=True) # Copy server content - package_py = addon_output_dir / "package.py" - addon_name = addon_dir.name - if addon_name == "royal_render": - addon_name = "royalrender" - package_py_content = PACKAGE_PY_TEMPLATE.format( - addon_name=addon_name, addon_version=addon_version - ) - - with open(package_py, "w+") as pkg_py: - pkg_py.write(package_py_content) + dst_package_py = addon_output_dir / "package.py" + shutil.copy(src_package_py, dst_package_py) server_dir = addon_dir / "server" shutil.copytree( server_dir, addon_output_dir / "server", dirs_exist_ok=True ) + prepare_client_code(addon_dir, addon_output_dir, addon_version) + if create_zip: create_addon_zip( output_dir, addon_dir.name, addon_version, keep_source @@ -220,7 +278,6 @@ def main( addons=None, ): current_dir = Path(os.path.dirname(os.path.abspath(__file__))) - root_dir = current_dir.parent create_zip = not skip_zip if output_dir: diff --git a/server_addon/deadline/package.py b/server_addon/deadline/package.py new file mode 100644 index 0000000000..944797fea6 --- /dev/null +++ b/server_addon/deadline/package.py @@ -0,0 +1,3 @@ +name = "deadline" +title = "Deadline" +version = "0.1.10" diff --git a/server_addon/deadline/server/__init__.py b/server_addon/deadline/server/__init__.py index 36d04189a9..e7dcb7d347 100644 --- a/server_addon/deadline/server/__init__.py +++ b/server_addon/deadline/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import 
BaseServerAddon -from .version import __version__ from .settings import DeadlineSettings, DEFAULT_VALUES class Deadline(BaseServerAddon): - name = "deadline" - title = "Deadline" - version = __version__ settings_model: Type[DeadlineSettings] = DeadlineSettings async def get_default_settings(self): diff --git a/server_addon/deadline/server/settings/main.py b/server_addon/deadline/server/settings/main.py index 9537d6d550..21a314cd2f 100644 --- a/server_addon/deadline/server/settings/main.py +++ b/server_addon/deadline/server/settings/main.py @@ -1,3 +1,4 @@ +from typing import TYPE_CHECKING from pydantic import validator from ayon_server.settings import ( @@ -5,6 +6,8 @@ from ayon_server.settings import ( SettingsField, ensure_unique_names, ) +if TYPE_CHECKING: + from ayon_server.addons import BaseServerAddon from .publish_plugins import ( PublishPluginsModel, diff --git a/server_addon/deadline/server/settings/publish_plugins.py b/server_addon/deadline/server/settings/publish_plugins.py index 10ec8ac95f..9f69143e37 100644 --- a/server_addon/deadline/server/settings/publish_plugins.py +++ b/server_addon/deadline/server/settings/publish_plugins.py @@ -211,6 +211,32 @@ class HarmonySubmitDeadlineModel(BaseSettingsModel): department: str = SettingsField(title="Department") +class HoudiniSubmitDeadlineModel(BaseSettingsModel): + """Houdini deadline render submitter settings.""" + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + + priority: int = SettingsField(title="Priority") + chunk_size: int = SettingsField(title="Chunk Size") + group: str = SettingsField(title="Group") + + export_priority: int = SettingsField(title="Export Priority") + export_chunk_size: int = SettingsField(title="Export Chunk Size") + export_group: str = SettingsField(title="Export Group") + + +class HoudiniCacheSubmitDeadlineModel(BaseSettingsModel): + """Houdini deadline cache submitter settings.""" + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + + priority: int = SettingsField(title="Priority") + chunk_size: int = SettingsField(title="Chunk Size") + group: str = SettingsField(title="Group") + + class AfterEffectsSubmitDeadlineModel(BaseSettingsModel): """After Effects deadline submitter settings.""" @@ -332,6 +358,12 @@ class PublishPluginsModel(BaseSettingsModel): HarmonySubmitDeadline: HarmonySubmitDeadlineModel = SettingsField( default_factory=HarmonySubmitDeadlineModel, title="Harmony Submit to deadline") + HoudiniCacheSubmitDeadline: HoudiniCacheSubmitDeadlineModel = SettingsField( + default_factory=HoudiniCacheSubmitDeadlineModel, + title="Houdini Submit cache to deadline") + HoudiniSubmitDeadline: HoudiniSubmitDeadlineModel = SettingsField( + default_factory=HoudiniSubmitDeadlineModel, + title="Houdini Submit render to deadline") MaxSubmitDeadline: MaxSubmitDeadlineModel = SettingsField( default_factory=MaxSubmitDeadlineModel, title="Max Submit to deadline") @@ -416,6 +448,25 @@ DEFAULT_DEADLINE_PLUGINS_SETTINGS = { "group": "", "department": "" }, + "HoudiniCacheSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "priority": 50, + "chunk_size": 999999, + "group": "" + }, + "HoudiniSubmitDeadline": { + "enabled": True, + "optional": False, + "active": True, + "priority": 50, + "chunk_size": 1, + "group": "", + "export_priority": 50, + "export_chunk_size": 10, + "export_group": "" + }, 
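A minimal sketch of how a submit plugin would read the new Houdini Deadline defaults above, assuming AYON's conventional project-settings lookup; the variable and key access below is illustrative only, not code from this diff:

    # Sketch only: assumed lookup of the new Houdini Deadline submitter settings.
    houdini_submit = project_settings["deadline"]["publish"]["HoudiniSubmitDeadline"]
    priority = houdini_submit["priority"]  # render job priority, 50 by default
    export_chunk = houdini_submit["export_chunk_size"]  # export job chunking, 10 by default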
"MaxSubmitDeadline": { "enabled": True, "optional": False, diff --git a/server_addon/deadline/server/version.py b/server_addon/deadline/server/version.py deleted file mode 100644 index c11f861afb..0000000000 --- a/server_addon/deadline/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.9" diff --git a/server_addon/flame/package.py b/server_addon/flame/package.py new file mode 100644 index 0000000000..8c077ed91d --- /dev/null +++ b/server_addon/flame/package.py @@ -0,0 +1,3 @@ +name = "flame" +title = "Flame" +version = "0.1.0" diff --git a/server_addon/flame/server/__init__.py b/server_addon/flame/server/__init__.py index 7d5eb3960f..4aa46617ee 100644 --- a/server_addon/flame/server/__init__.py +++ b/server_addon/flame/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import FlameSettings, DEFAULT_VALUES class FlameAddon(BaseServerAddon): - name = "flame" - title = "Flame" - version = __version__ settings_model: Type[FlameSettings] = FlameSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/flame/server/settings/create_plugins.py b/server_addon/flame/server/settings/create_plugins.py index 44fb8a2e91..2f17ec40c4 100644 --- a/server_addon/flame/server/settings/create_plugins.py +++ b/server_addon/flame/server/settings/create_plugins.py @@ -87,7 +87,7 @@ class CreateShotClipModel(BaseSettingsModel): ) -class CreatePuginsModel(BaseSettingsModel): +class CreatePluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModel = SettingsField( default_factory=CreateShotClipModel, title="Create Shot Clip" diff --git a/server_addon/flame/server/settings/main.py b/server_addon/flame/server/settings/main.py index 047f5af287..c838ee9646 100644 --- a/server_addon/flame/server/settings/main.py +++ b/server_addon/flame/server/settings/main.py @@ -1,8 +1,8 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import FlameImageIOModel, DEFAULT_IMAGEIO_SETTINGS -from .create_plugins import CreatePuginsModel, DEFAULT_CREATE_SETTINGS -from .publish_plugins import PublishPuginsModel, DEFAULT_PUBLISH_SETTINGS +from .create_plugins import CreatePluginsModel, DEFAULT_CREATE_SETTINGS +from .publish_plugins import PublishPluginsModel, DEFAULT_PUBLISH_SETTINGS from .loader_plugins import LoaderPluginsModel, DEFAULT_LOADER_SETTINGS @@ -11,12 +11,12 @@ class FlameSettings(BaseSettingsModel): default_factory=FlameImageIOModel, title="Color Management (ImageIO)" ) - create: CreatePuginsModel = SettingsField( - default_factory=CreatePuginsModel, + create: CreatePluginsModel = SettingsField( + default_factory=CreatePluginsModel, title="Create plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) load: LoaderPluginsModel = SettingsField( diff --git a/server_addon/flame/server/settings/publish_plugins.py b/server_addon/flame/server/settings/publish_plugins.py index decb00fcfa..b34083b4e2 100644 --- a/server_addon/flame/server/settings/publish_plugins.py +++ b/server_addon/flame/server/settings/publish_plugins.py @@ -121,7 +121,7 @@ class IntegrateBatchGroupModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectTimelineInstances: 
CollectTimelineInstancesModel = SettingsField( default_factory=CollectTimelineInstancesModel, title="Collect Timeline Instances" diff --git a/server_addon/flame/server/version.py b/server_addon/flame/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/flame/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/fusion/package.py b/server_addon/fusion/package.py new file mode 100644 index 0000000000..9e7a46df2c --- /dev/null +++ b/server_addon/fusion/package.py @@ -0,0 +1,3 @@ +name = "fusion" +title = "Fusion" +version = "0.1.5" diff --git a/server_addon/fusion/server/__init__.py b/server_addon/fusion/server/__init__.py index 4d43f28812..0456cfd5ee 100644 --- a/server_addon/fusion/server/__init__.py +++ b/server_addon/fusion/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import FusionSettings, DEFAULT_VALUES class FusionAddon(BaseServerAddon): - name = "fusion" - title = "Fusion" - version = __version__ settings_model: Type[FusionSettings] = FusionSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/fusion/server/settings.py b/server_addon/fusion/server/settings.py index a913db16da..f16ae6e3e7 100644 --- a/server_addon/fusion/server/settings.py +++ b/server_addon/fusion/server/settings.py @@ -75,6 +75,12 @@ class HooksModel(BaseSettingsModel): default_factory=HookOptionalModel, title="Install PySide2" ) + FusionLaunchMenuHook: HookOptionalModel = SettingsField( + default_factory=HookOptionalModel, + title="Launch AYON Menu on Fusion Start", + description="Launch the AYON menu on Fusion application startup. 
" + "This is only supported for Fusion 18+" + ) class CreateSaverModel(CreateSaverPluginModel): @@ -143,6 +149,9 @@ DEFAULT_VALUES = { "hooks": { "InstallPySideToFusion": { "enabled": True + }, + "FusionLaunchMenuHook": { + "enabled": False } }, "create": { diff --git a/server_addon/fusion/server/version.py b/server_addon/fusion/server/version.py deleted file mode 100644 index bbab0242f6..0000000000 --- a/server_addon/fusion/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.4" diff --git a/server_addon/harmony/package.py b/server_addon/harmony/package.py new file mode 100644 index 0000000000..83e88e7d57 --- /dev/null +++ b/server_addon/harmony/package.py @@ -0,0 +1,3 @@ +name = "harmony" +title = "Harmony" +version = "0.1.2" diff --git a/server_addon/harmony/server/__init__.py b/server_addon/harmony/server/__init__.py index 4ecda1989e..154618241e 100644 --- a/server_addon/harmony/server/__init__.py +++ b/server_addon/harmony/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import HarmonySettings, DEFAULT_HARMONY_SETTING -from .version import __version__ class Harmony(BaseServerAddon): - name = "harmony" - title = "Harmony" - version = __version__ - settings_model = HarmonySettings async def get_default_settings(self): diff --git a/server_addon/harmony/server/version.py b/server_addon/harmony/server/version.py deleted file mode 100644 index df0c92f1e2..0000000000 --- a/server_addon/harmony/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.2" diff --git a/server_addon/hiero/package.py b/server_addon/hiero/package.py new file mode 100644 index 0000000000..cabe68eb68 --- /dev/null +++ b/server_addon/hiero/package.py @@ -0,0 +1,3 @@ +name = "hiero" +title = "Hiero" +version = "0.1.2" diff --git a/server_addon/hiero/server/__init__.py b/server_addon/hiero/server/__init__.py index d0f9bcefc3..3db78eafd7 100644 --- a/server_addon/hiero/server/__init__.py +++ b/server_addon/hiero/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import HieroSettings, DEFAULT_VALUES class HieroAddon(BaseServerAddon): - name = "hiero" - title = "Hiero" - version = __version__ settings_model: Type[HieroSettings] = HieroSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/hiero/server/settings/loader_plugins.py b/server_addon/hiero/server/settings/loader_plugins.py index b5a81d1ae2..682f9fd2d9 100644 --- a/server_addon/hiero/server/settings/loader_plugins.py +++ b/server_addon/hiero/server/settings/loader_plugins.py @@ -15,7 +15,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadClip: LoadClipModel = SettingsField( default_factory=LoadClipModel, title="Load Clip" diff --git a/server_addon/hiero/server/settings/main.py b/server_addon/hiero/server/settings/main.py index b170ecafb8..378af6a539 100644 --- a/server_addon/hiero/server/settings/main.py +++ b/server_addon/hiero/server/settings/main.py @@ -9,11 +9,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, 
DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .scriptsmenu import ( @@ -35,12 +35,12 @@ class HieroSettings(BaseSettingsModel): default_factory=CreatorPluginsSettings, title="Creator Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader plugins" ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish plugins" ) scriptsmenu: ScriptsmenuSettings = SettingsField( diff --git a/server_addon/hiero/server/settings/publish_plugins.py b/server_addon/hiero/server/settings/publish_plugins.py index c35c61c332..0e43d4ce3a 100644 --- a/server_addon/hiero/server/settings/publish_plugins.py +++ b/server_addon/hiero/server/settings/publish_plugins.py @@ -49,7 +49,7 @@ class ExtractReviewCutUpVideoModel(BaseSettingsModel): ) -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceVersion: CollectInstanceVersionModel = SettingsField( default_factory=CollectInstanceVersionModel, title="Collect Instance Version" diff --git a/server_addon/hiero/server/version.py b/server_addon/hiero/server/version.py deleted file mode 100644 index b3f4756216..0000000000 --- a/server_addon/hiero/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.2" diff --git a/server_addon/houdini/package.py b/server_addon/houdini/package.py new file mode 100644 index 0000000000..4e441c76ae --- /dev/null +++ b/server_addon/houdini/package.py @@ -0,0 +1,3 @@ +name = "houdini" +title = "Houdini" +version = "0.2.13" diff --git a/server_addon/houdini/server/__init__.py b/server_addon/houdini/server/__init__.py index 870ec2d0b7..8c1ffcb0b3 100644 --- a/server_addon/houdini/server/__init__.py +++ b/server_addon/houdini/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import HoudiniSettings, DEFAULT_VALUES class Houdini(BaseServerAddon): - name = "houdini" - title = "Houdini" - version = __version__ settings_model: Type[HoudiniSettings] = HoudiniSettings async def get_default_settings(self): diff --git a/server_addon/houdini/server/settings/imageio.py b/server_addon/houdini/server/settings/imageio.py index f4850c5df7..c4f4813d51 100644 --- a/server_addon/houdini/server/settings/imageio.py +++ b/server_addon/houdini/server/settings/imageio.py @@ -34,6 +34,34 @@ class ImageIOFileRulesModel(BaseSettingsModel): return value + +class WorkfileImageIOModel(BaseSettingsModel): + """Workfile settings help. + + Empty values will be skipped, allowing any existing env vars to + pass through as defined. + + Note: The render space in Houdini is + always set to the 'scene_linear' role.""" + + enabled: bool = SettingsField(False, title="Enabled") + default_display: str = SettingsField( + title="Default active displays", + description="It behaves like the 'OCIO_ACTIVE_DISPLAYS' env var," + " Colon-separated list of displays, e.g. ACES:P3" + ) + default_view: str = SettingsField( + title="Default active views", + description="It behaves like the 'OCIO_ACTIVE_VIEWS' env var," + " Colon-separated list of views, e.g. sRGB:DCDM" + ) + review_color_space: str = SettingsField( + title="Review colorspace", + description="It exposes OCIO Colorspace parameter in opengl nodes."
+ " If left empty, AYON will figure out the default " + "colorspace using your default display and default view." + ) + + class HoudiniImageIOModel(BaseSettingsModel): activate_host_color_management: bool = SettingsField( True, title="Enable Color Management" @@ -46,3 +74,26 @@ class HoudiniImageIOModel(BaseSettingsModel): default_factory=ImageIOFileRulesModel, title="File Rules" ) + workfile: WorkfileImageIOModel = SettingsField( + default_factory=WorkfileImageIOModel, + title="Workfile" + ) + + +DEFAULT_IMAGEIO_SETTINGS = { + "activate_host_color_management": False, + "ocio_config": { + "override_global_config": False, + "filepath": [] + }, + "file_rules": { + "activate_host_rules": False, + "rules": [] + }, + "workfile": { + "enabled": False, + "default_display": "ACES", + "default_view": "sRGB", + "review_color_space": "" + } +} diff --git a/server_addon/houdini/server/settings/main.py b/server_addon/houdini/server/settings/main.py index cbb19d15b7..3acab0ce74 100644 --- a/server_addon/houdini/server/settings/main.py +++ b/server_addon/houdini/server/settings/main.py @@ -3,7 +3,10 @@ from .general import ( GeneralSettingsModel, DEFAULT_GENERAL_SETTINGS ) -from .imageio import HoudiniImageIOModel +from .imageio import ( + HoudiniImageIOModel, + DEFAULT_IMAGEIO_SETTINGS +) from .shelves import ShelvesModel from .create import ( CreatePluginsModel, @@ -40,6 +43,7 @@ class HoudiniSettings(BaseSettingsModel): DEFAULT_VALUES = { "general": DEFAULT_GENERAL_SETTINGS, + "imageio": DEFAULT_IMAGEIO_SETTINGS, "shelves": [], "create": DEFAULT_HOUDINI_CREATE_SETTINGS, "publish": DEFAULT_HOUDINI_PUBLISH_SETTINGS diff --git a/server_addon/houdini/server/settings/publish.py b/server_addon/houdini/server/settings/publish.py index 1741568d63..8e0e7f7795 100644 --- a/server_addon/houdini/server/settings/publish.py +++ b/server_addon/houdini/server/settings/publish.py @@ -53,6 +53,9 @@ class PublishPluginsModel(BaseSettingsModel): default_factory=BasicValidateModel, title="Validate Latest Containers.", section="Validators") + ValidateInstanceInContextHoudini: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Instance is in same Context.") ValidateMeshIsStatic: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, title="Validate Mesh is Static.") @@ -84,6 +87,11 @@ DEFAULT_HOUDINI_PUBLISH_SETTINGS = { "optional": True, "active": True }, + "ValidateInstanceInContextHoudini": { + "enabled": True, + "optional": True, + "active": True + }, "ValidateMeshIsStatic": { "enabled": True, "optional": True, diff --git a/server_addon/houdini/server/version.py b/server_addon/houdini/server/version.py deleted file mode 100644 index 5635676f6b..0000000000 --- a/server_addon/houdini/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.2.11" diff --git a/server_addon/max/package.py b/server_addon/max/package.py new file mode 100644 index 0000000000..fb1f1b3050 --- /dev/null +++ b/server_addon/max/package.py @@ -0,0 +1,3 @@ +name = "max" +title = "Max" +version = "0.1.7" diff --git a/server_addon/max/server/__init__.py b/server_addon/max/server/__init__.py index 31c694a084..d03b29d249 100644 --- a/server_addon/max/server/__init__.py +++ b/server_addon/max/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import MaxSettings, DEFAULT_VALUES class MaxAddon(BaseServerAddon): - name = "max" - title = "Max" - version = __version__ settings_model:
Type[MaxSettings] = MaxSettings async def get_default_settings(self): diff --git a/server_addon/max/server/settings/publishers.py b/server_addon/max/server/settings/publishers.py index 5e28c1b467..5e1b348d92 100644 --- a/server_addon/max/server/settings/publishers.py +++ b/server_addon/max/server/settings/publishers.py @@ -49,6 +49,20 @@ class FamilyMappingItemModel(BaseSettingsModel): ) + +class ValidateModelNameModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + regex: str = SettingsField( + "(.*)_(?P<asset>.*)_(GEO)", + title="Validation regex", + description=( + "Regex for validating model name. You can use named " + "capturing groups: (?P<asset>.*) for Asset name" + ) + ) + + class ValidateLoadedPluginModel(BaseSettingsModel): enabled: bool = SettingsField(title="Enabled") optional: bool = SettingsField(title="Optional") @@ -65,10 +79,14 @@ class BasicValidateModel(BaseSettingsModel): class PublishersModel(BaseSettingsModel): + ValidateInstanceInContext: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Instance In Context", + section="Validators" + ) ValidateFrameRange: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, - title="Validate Frame Range", - section="Validators" + title="Validate Frame Range" ) ValidateAttributes: ValidateAttributesModel = SettingsField( default_factory=ValidateAttributesModel, @@ -82,10 +100,26 @@ class PublishersModel(BaseSettingsModel): "the system automatically skips checking it" ) ) + ValidateNoAnimation: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate No Animation" + ) ValidateLoadedPlugin: ValidateLoadedPluginModel = SettingsField( default_factory=ValidateLoadedPluginModel, title="Validate Loaded Plugin" ) + ValidateMeshHasUVs: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Mesh Has UVs" + ) + ValidateModelName: ValidateModelNameModel = SettingsField( + default_factory=ValidateModelNameModel, + title="Validate Model Name" + ) + ValidateRenderPasses: BasicValidateModel = SettingsField( + default_factory=BasicValidateModel, + title="Validate Render Passes" + ) ExtractModelObj: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, title="Extract OBJ", @@ -110,6 +144,11 @@ class PublishersModel(BaseSettingsModel): DEFAULT_PUBLISH_SETTINGS = { + "ValidateInstanceInContext": { + "enabled": True, + "optional": True, + "active": True + }, "ValidateFrameRange": { "enabled": True, "optional": True, @@ -129,11 +168,32 @@ DEFAULT_PUBLISH_SETTINGS = { "nearclip": 1.0, "farclip": 1000.0 }, + "ValidateModelName": { + "enabled": True, + "optional": True, + "active": False, + "regex": "(.*)_(?P<asset>.*)_(GEO)" + }, "ValidateLoadedPlugin": { "enabled": False, "optional": True, "family_plugins_mapping": [] }, + "ValidateMeshHasUVs": { + "enabled": True, + "optional": True, + "active": False + }, + "ValidateNoAnimation": { + "enabled": True, + "optional": True, + "active": False, + }, + "ValidateRenderPasses": { + "enabled": True, + "optional": False, + "active": True + }, "ExtractModelObj": { "enabled": True, "optional": True, diff --git a/server_addon/max/server/version.py b/server_addon/max/server/version.py deleted file mode 100644 index 1276d0254f..0000000000 --- a/server_addon/max/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.5" diff --git a/server_addon/maya/package.py
b/server_addon/maya/package.py new file mode 100644 index 0000000000..00f28d901e --- /dev/null +++ b/server_addon/maya/package.py @@ -0,0 +1,3 @@ +name = "maya" +title = "Maya" +version = "0.1.16" diff --git a/server_addon/maya/server/__init__.py b/server_addon/maya/server/__init__.py index 8784427dcf..6dda2cdd77 100644 --- a/server_addon/maya/server/__init__.py +++ b/server_addon/maya/server/__init__.py @@ -2,13 +2,9 @@ from ayon_server.addons import BaseServerAddon from .settings.main import MayaSettings, DEFAULT_MAYA_SETTING -from .version import __version__ class MayaAddon(BaseServerAddon): - name = "maya" - title = "Maya" - version = __version__ settings_model = MayaSettings async def get_default_settings(self): diff --git a/server_addon/maya/server/settings/loaders.py b/server_addon/maya/server/settings/loaders.py index 418a7046ae..2f104d2858 100644 --- a/server_addon/maya/server/settings/loaders.py +++ b/server_addon/maya/server/settings/loaders.py @@ -1,5 +1,9 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGB_float, ColorRGBA_uint8 +from ayon_server.types import ColorRGBA_uint8 + + +class LoaderEnabledModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") class ColorsSetting(BaseSettingsModel): @@ -94,10 +98,22 @@ class ReferenceLoaderModel(BaseSettingsModel): class ImportLoaderModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") namespace: str = SettingsField(title="Namespace") group_name: str = SettingsField(title="Group name") +class YetiRigLoaderModel(LoaderEnabledModel): + create_cache_instance_on_load: bool = SettingsField( + title="Create Yeti Cache instance on load", + description=( + "When enabled, upon loading a Yeti Rig product a new Yeti cache " + "instance is automatically created as preparation to publishing " + "the output directly." 
+ ) + ) + + class LoadersModel(BaseSettingsModel): colors: ColorsSetting = SettingsField( default_factory=ColorsSetting, @@ -113,6 +129,89 @@ class LoadersModel(BaseSettingsModel): title="Import Loader" ) + # Enable/disable loaders + ArnoldStandinLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Arnold Standin Loader" + ) + AssemblyLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Assembly Loader" + ) + AudioLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Audio Loader" + ) + GpuCacheLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="GPU Cache Loader" + ) + FileNodeLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="File Node (Image) Loader" + ) + ImagePlaneLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Image Plane Loader" + ) + LookLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Look Loader" + ) + MatchmoveLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Matchmove Loader" + ) + MultiverseUsdLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Multiverse USD Loader" + ) + MultiverseUsdOverLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Multiverse USD Override Loader" + ) + RedshiftProxyLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Redshift Proxy Loader" + ) + RenderSetupLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Render Setup Loader" + ) + LoadVDBtoArnold: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="VDB to Arnold Loader" + ) + LoadVDBtoRedShift: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="VDB to Redshift Loader" + ) + LoadVDBtoVRay: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="VDB to V-Ray Loader" + ) + VRayProxyLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Vray Proxy Loader" + ) + VRaySceneLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="VrayScene Loader" + ) + XgenLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Xgen Loader" + ) + YetiCacheLoader: LoaderEnabledModel = SettingsField( + default_factory=LoaderEnabledModel, + title="Yeti Cache Loader" + ) + YetiRigLoader: YetiRigLoaderModel = SettingsField( + default_factory=YetiRigLoaderModel, + title="Yeti Rig Loader" + ) + + DEFAULT_LOADERS_SETTING = { "colors": { "model": [209, 132, 30, 1.0], @@ -154,8 +253,32 @@ DEFAULT_LOADERS_SETTING = { "display_handle": True }, "import_loader": { + "enabled": True, "namespace": "{folder[name]}_{product[name]}_##_", "group_name": "_GRP", "display_handle": True - } + }, + "ArnoldStandinLoader": {"enabled": True}, + "AssemblyLoader": {"enabled": True}, + "AudioLoader": {"enabled": True}, + "FileNodeLoader": {"enabled": True}, + "GpuCacheLoader": {"enabled": True}, + "ImagePlaneLoader": {"enabled": True}, + "LookLoader": {"enabled": True}, + "MatchmoveLoader": {"enabled": True}, + "MultiverseUsdLoader": {"enabled": True}, + "MultiverseUsdOverLoader": {"enabled": True}, + "RedshiftProxyLoader": {"enabled": True}, + "RenderSetupLoader": {"enabled": True}, + 
"LoadVDBtoArnold": {"enabled": True}, + "LoadVDBtoRedShift": {"enabled": True}, + "LoadVDBtoVRay": {"enabled": True}, + "VRayProxyLoader": {"enabled": True}, + "VRaySceneLoader": {"enabled": True}, + "XgenLoader": {"enabled": True}, + "YetiCacheLoader": {"enabled": True}, + "YetiRigLoader": { + "enabled": True, + "create_cache_instance_on_load": True + }, } diff --git a/server_addon/maya/server/settings/main.py b/server_addon/maya/server/settings/main.py index f7f62e219d..a4562f54d7 100644 --- a/server_addon/maya/server/settings/main.py +++ b/server_addon/maya/server/settings/main.py @@ -30,6 +30,15 @@ class ExtMappingItemModel(BaseSettingsModel): class MayaSettings(BaseSettingsModel): """Maya Project Settings.""" + use_cbid_workflow: bool = SettingsField( + True, title="Use cbId workflow", + description=( + "When enabled, a per node `cbId` identifier will be created and " + "validated for many product types. This is then used for look " + "publishing and many others. By disabling this, the `cbId` " + "attribute will still be created on scene save but it will not " + "be validated.")) + open_workfile_post_initialization: bool = SettingsField( True, title="Open Workfile Post Initialization") explicit_plugins_loading: ExplicitPluginsLoadingModel = SettingsField( @@ -88,6 +97,7 @@ DEFAULT_MEL_WORKSPACE_SETTINGS = "\n".join(( )) DEFAULT_MAYA_SETTING = { + "use_cbid_workflow": True, "open_workfile_post_initialization": True, "explicit_plugins_loading": DEFAULT_EXPLITCIT_PLUGINS_LOADING_SETTINGS, "imageio": DEFAULT_IMAGEIO_SETTINGS, diff --git a/server_addon/maya/server/settings/publish_playblast.py b/server_addon/maya/server/settings/publish_playblast.py index 39f48bacbe..d513a43e99 100644 --- a/server_addon/maya/server/settings/publish_playblast.py +++ b/server_addon/maya/server/settings/publish_playblast.py @@ -6,7 +6,7 @@ from ayon_server.settings import ( ensure_unique_names, task_types_enum, ) -from ayon_server.types import ColorRGBA_uint8, ColorRGB_float +from ayon_server.types import ColorRGBA_uint8 def hardware_falloff_enum(): diff --git a/server_addon/maya/server/settings/publishers.py b/server_addon/maya/server/settings/publishers.py index 3a6de2eb44..27288053a2 100644 --- a/server_addon/maya/server/settings/publishers.py +++ b/server_addon/maya/server/settings/publishers.py @@ -299,6 +299,16 @@ class ExtractAlembicModel(BaseSettingsModel): families: list[str] = SettingsField( default_factory=list, title="Families") + bake_attributes: list[str] = SettingsField( + default_factory=list, title="Bake Attributes", + description="List of attributes that will be included in the alembic " + "export.", + ) + bake_attribute_prefixes: list[str] = SettingsField( + default_factory=list, title="Bake Attribute Prefixes", + description="List of attribute prefixes for attributes that will be " + "included in the alembic export.", + ) class ExtractObjModel(BaseSettingsModel): @@ -306,6 +316,12 @@ class ExtractObjModel(BaseSettingsModel): optional: bool = SettingsField(title="Optional") +class ExtractModelModel(BaseSettingsModel): + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") + + class ExtractMayaSceneRawModel(BaseSettingsModel): """Add loaded instances to those published families:""" enabled: bool = SettingsField(title="ExtractMayaSceneRaw") @@ -315,14 +331,13 @@ class ExtractMayaSceneRawModel(BaseSettingsModel): class ExtractCameraAlembicModel(BaseSettingsModel): - """ - List of attributes that will be 
added to the baked alembic camera. Needs to be written in python list syntax. - """ enabled: bool = SettingsField(title="ExtractCameraAlembic") optional: bool = SettingsField(title="Optional") active: bool = SettingsField(title="Active") bake_attributes: str = SettingsField( - "[]", title="Base Attributes", widget="textarea" + "[]", title="Bake Attributes", widget="textarea", + description="List of attributes that will be included in the alembic " + "camera export. Needs to be written as a JSON list.", ) @validator("bake_attributes") @@ -363,7 +378,9 @@ class ExtractLookModel(BaseSettingsModel): class ExtractGPUCacheModel(BaseSettingsModel): - enabled: bool = True + enabled: bool = SettingsField(title="Enabled") + optional: bool = SettingsField(title="Optional") + active: bool = SettingsField(title="Active") families: list[str] = SettingsField(default_factory=list, title="Families") step: float = SettingsField(1.0, ge=1.0, title="Step") stepSave: int = SettingsField(1, ge=1, title="Step Save") @@ -753,14 +770,6 @@ class PublishersModel(BaseSettingsModel): default_factory=BasicValidateModel, title="Validate Ass Relative Paths" ) - ValidateInstancerContent: BasicValidateModel = SettingsField( - default_factory=BasicValidateModel, - title="Validate Instancer Content" - ) - ValidateInstancerFrameRanges: BasicValidateModel = SettingsField( - default_factory=BasicValidateModel, - title="Validate Instancer Cache Frame Ranges" - ) ValidateNoDefaultCameras: BasicValidateModel = SettingsField( default_factory=BasicValidateModel, title="Validate No Default Cameras" @@ -798,6 +807,10 @@ class PublishersModel(BaseSettingsModel): default_factory=ExtractGPUCacheModel, title="Extract GPU Cache", ) + ExtractModel: ExtractModelModel = SettingsField( + default_factory=ExtractModelModel, + title="Extract Model (Maya Scene)" + ) DEFAULT_SUFFIX_NAMING = { @@ -1193,7 +1206,9 @@ DEFAULT_PUBLISH_SETTINGS = { "pointcache", "model", "vrayproxy.alembic" - ] + ], + "bake_attributes": [], + "bake_attribute_prefixes": [] }, "ExtractObj": { "enabled": False, @@ -1300,16 +1315,6 @@ DEFAULT_PUBLISH_SETTINGS = { "optional": False, "active": True }, - "ValidateInstancerContent": { - "enabled": True, - "optional": False, - "active": True - }, - "ValidateInstancerFrameRanges": { - "enabled": True, - "optional": False, - "active": True - }, "ValidateNoDefaultCameras": { "enabled": True, "optional": False, @@ -1348,6 +1353,8 @@ DEFAULT_PUBLISH_SETTINGS = { }, "ExtractGPUCache": { "enabled": False, + "optional": False, + "active": True, "families": [ "model", "animation", @@ -1360,5 +1367,10 @@ DEFAULT_PUBLISH_SETTINGS = { "optimizeAnimationsForMotionBlur": True, "writeMaterials": True, "useBaseTessellation": True + }, + "ExtractModel": { + "enabled": True, + "optional": True, + "active": True, } } diff --git a/server_addon/maya/server/settings/render_settings.py b/server_addon/maya/server/settings/render_settings.py index 577049b42f..bc476ec49c 100644 --- a/server_addon/maya/server/settings/render_settings.py +++ b/server_addon/maya/server/settings/render_settings.py @@ -355,7 +355,7 @@ class RedshiftSettingsModel(BaseSettingsModel): ) additional_options: list[AdditionalOptionsModel] = SettingsField( default_factory=list, - title="Additional Vray Options", + title="Additional Redshift Options", description=( "Add additional options - put attribute and value, like " "reflectionMaxTraceDepth and 3" diff --git a/server_addon/maya/server/settings/scriptsmenu.py b/server_addon/maya/server/settings/scriptsmenu.py index 
d01dff1621..7b0ba7d831 100644 --- a/server_addon/maya/server/settings/scriptsmenu.py +++ b/server_addon/maya/server/settings/scriptsmenu.py @@ -1,3 +1,7 @@ +import json + +from pydantic import validator +from ayon_server.exceptions import BadRequestException from ayon_server.settings import BaseSettingsModel, SettingsField @@ -14,19 +18,60 @@ class ScriptsmenuSubmodel(BaseSettingsModel): ) +_definition_mode_type = [ + {"value": "definition", "label": "Menu Builder"}, + {"value": "definition_json", "label": "Raw JSON (advanced)"} +] + + class ScriptsmenuModel(BaseSettingsModel): + """Add a custom scripts menu to Maya""" _isGroup = True name: str = SettingsField(title="Menu Name") + + definition_type: str = SettingsField( + title="Define menu using", + description="Choose the way to define the custom scripts menu " + "via settings", + enum_resolver=lambda: _definition_mode_type, + conditionalEnum=True, + default="definition" + ) definition: list[ScriptsmenuSubmodel] = SettingsField( default_factory=list, title="Menu Definition", description="Scriptmenu Items Definition" ) + definition_json: str = SettingsField( + "[]", title="Menu Definition JSON", widget="textarea", + description=( + "Define the custom tools menu using a JSON list. " + "For more details on the JSON format, see " + "[here](https://github.com/Colorbleed/scriptsmenu?tab=readme-ov-file#configuration)." # noqa: E501 + ) + ) + + @validator("definition_json") + def validate_json(cls, value): + if not value.strip(): + return "[]" + try: + converted_value = json.loads(value) + success = isinstance(converted_value, list) + except json.JSONDecodeError: + success = False + + if not success: + raise BadRequestException( + "The definition can't be parsed as json list object" + ) + return value DEFAULT_SCRIPTSMENU_SETTINGS = { "name": "Custom Tools", + "definition_type": "definition", "definition": [ { "type": "action", @@ -39,5 +84,6 @@ DEFAULT_SCRIPTSMENU_SETTINGS = { "shader" ] } - ] + ], + "definition_json": "[]" } diff --git a/server_addon/maya/server/version.py b/server_addon/maya/server/version.py deleted file mode 100644 index 8202425a2d..0000000000 --- a/server_addon/maya/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.9" diff --git a/server_addon/nuke/package.py b/server_addon/nuke/package.py new file mode 100644 index 0000000000..9630c370bc --- /dev/null +++ b/server_addon/nuke/package.py @@ -0,0 +1,3 @@ +name = "nuke" +title = "Nuke" +version = "0.1.10" diff --git a/server_addon/nuke/server/__init__.py b/server_addon/nuke/server/__init__.py index 032ceea5fb..aeb5e36675 100644 --- a/server_addon/nuke/server/__init__.py +++ b/server_addon/nuke/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import NukeSettings, DEFAULT_VALUES class NukeAddon(BaseServerAddon): - name = "nuke" - title = "Nuke" - version = __version__ settings_model: Type[NukeSettings] = NukeSettings async def get_default_settings(self): diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py index a5c3315fd4..531ea8d986 100644 --- a/server_addon/nuke/server/settings/loader_plugins.py +++ b/server_addon/nuke/server/settings/loader_plugins.py @@ -42,7 +42,7 @@ class LoadClipModel(BaseSettingsModel): ) -class LoaderPuginsModel(BaseSettingsModel): +class LoaderPluginsModel(BaseSettingsModel): LoadImage: 
LoadImageModel = SettingsField( default_factory=LoadImageModel, title="Load Image" diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py index 2b269f1fce..1fd347cc21 100644 --- a/server_addon/nuke/server/settings/main.py +++ b/server_addon/nuke/server/settings/main.py @@ -1,7 +1,6 @@ from ayon_server.settings import ( BaseSettingsModel, SettingsField, - ensure_unique_names ) from .general import ( @@ -29,11 +28,11 @@ from .create_plugins import ( DEFAULT_CREATE_SETTINGS ) from .publish_plugins import ( - PublishPuginsModel, + PublishPluginsModel, DEFAULT_PUBLISH_PLUGIN_SETTINGS ) from .loader_plugins import ( - LoaderPuginsModel, + LoaderPluginsModel, DEFAULT_LOADER_PLUGINS_SETTINGS ) from .workfile_builder import ( @@ -76,13 +75,13 @@ class NukeSettings(BaseSettingsModel): title="Creator Plugins", ) - publish: PublishPuginsModel = SettingsField( - default_factory=PublishPuginsModel, + publish: PublishPluginsModel = SettingsField( + default_factory=PublishPluginsModel, title="Publish Plugins", ) - load: LoaderPuginsModel = SettingsField( - default_factory=LoaderPuginsModel, + load: LoaderPluginsModel = SettingsField( + default_factory=LoaderPluginsModel, title="Loader Plugins", ) diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py index 02ee9b3bab..d5b05d8715 100644 --- a/server_addon/nuke/server/settings/publish_plugins.py +++ b/server_addon/nuke/server/settings/publish_plugins.py @@ -56,7 +56,7 @@ class CollectInstanceDataModel(BaseSettingsModel): sync_workfile_version_on_product_types: list[str] = SettingsField( default_factory=list, enum_resolver=nuke_product_types_enum, - title="Sync workfile versions for familes" + title="Product types" ) @@ -219,7 +219,7 @@ class IncrementScriptVersionModel(BaseSettingsModel): active: bool = SettingsField(title="Active") -class PublishPuginsModel(BaseSettingsModel): +class PublishPluginsModel(BaseSettingsModel): CollectInstanceData: CollectInstanceDataModel = SettingsField( title="Collect Instance Version", default_factory=CollectInstanceDataModel, diff --git a/server_addon/nuke/server/version.py b/server_addon/nuke/server/version.py deleted file mode 100644 index c11f861afb..0000000000 --- a/server_addon/nuke/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.9" diff --git a/server_addon/photoshop/package.py b/server_addon/photoshop/package.py new file mode 100644 index 0000000000..25615529d1 --- /dev/null +++ b/server_addon/photoshop/package.py @@ -0,0 +1,3 @@ +name = "photoshop" +title = "Photoshop" +version = "0.1.2" diff --git a/server_addon/photoshop/server/__init__.py b/server_addon/photoshop/server/__init__.py index 3a45f7a809..86d1025a2d 100644 --- a/server_addon/photoshop/server/__init__.py +++ b/server_addon/photoshop/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon from .settings import PhotoshopSettings, DEFAULT_PHOTOSHOP_SETTING -from .version import __version__ class Photoshop(BaseServerAddon): - name = "photoshop" - title = "Photoshop" - version = __version__ - settings_model = PhotoshopSettings async def get_default_settings(self): diff --git a/server_addon/photoshop/server/version.py b/server_addon/photoshop/server/version.py deleted file mode 100644 index df0c92f1e2..0000000000 --- a/server_addon/photoshop/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.2" diff --git 
a/server_addon/resolve/package.py b/server_addon/resolve/package.py new file mode 100644 index 0000000000..cf92413bce --- /dev/null +++ b/server_addon/resolve/package.py @@ -0,0 +1,3 @@ +name = "resolve" +title = "DaVinci Resolve" +version = "0.1.0" diff --git a/server_addon/resolve/server/__init__.py b/server_addon/resolve/server/__init__.py index a84180d0f5..35d2db19e4 100644 --- a/server_addon/resolve/server/__init__.py +++ b/server_addon/resolve/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import ResolveSettings, DEFAULT_VALUES class ResolveAddon(BaseServerAddon): - name = "resolve" - title = "DaVinci Resolve" - version = __version__ settings_model: Type[ResolveSettings] = ResolveSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/resolve/server/settings.py b/server_addon/resolve/server/settings.py index dcdb2f1b27..d9cbb98340 100644 --- a/server_addon/resolve/server/settings.py +++ b/server_addon/resolve/server/settings.py @@ -69,7 +69,7 @@ class CreateShotClipModels(BaseSettingsModel): ) -class CreatorPuginsModel(BaseSettingsModel): +class CreatorPluginsModel(BaseSettingsModel): CreateShotClip: CreateShotClipModels = SettingsField( default_factory=CreateShotClipModels, title="Create Shot Clip" @@ -84,8 +84,8 @@ class ResolveSettings(BaseSettingsModel): default_factory=ResolveImageIOModel, title="Color Management (ImageIO)" ) - create: CreatorPuginsModel = SettingsField( - default_factory=CreatorPuginsModel, + create: CreatorPluginsModel = SettingsField( + default_factory=CreatorPluginsModel, title="Creator plugins", ) diff --git a/server_addon/resolve/server/version.py b/server_addon/resolve/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/resolve/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/royal_render/server/version.py b/server_addon/royal_render/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/royal_render/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git a/server_addon/royalrender/package.py b/server_addon/royalrender/package.py new file mode 100644 index 0000000000..1fdea4abbb --- /dev/null +++ b/server_addon/royalrender/package.py @@ -0,0 +1,3 @@ +name = "royalrender" +title = "Royal Render" +version = "0.1.1" diff --git a/server_addon/royal_render/server/__init__.py b/server_addon/royalrender/server/__init__.py similarity index 77% rename from server_addon/royal_render/server/__init__.py rename to server_addon/royalrender/server/__init__.py index c5f0aafa00..5b10678136 100644 --- a/server_addon/royal_render/server/__init__.py +++ b/server_addon/royalrender/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import RoyalRenderSettings, DEFAULT_VALUES class RoyalRenderAddon(BaseServerAddon): - name = "royalrender" - version = __version__ - title = "Royal Render" settings_model: Type[RoyalRenderSettings] = RoyalRenderSettings async def get_default_settings(self): diff --git a/server_addon/royal_render/server/settings.py b/server_addon/royalrender/server/settings.py similarity index 100% rename from server_addon/royal_render/server/settings.py rename to server_addon/royalrender/server/settings.py diff 
--git a/server_addon/substancepainter/package.py b/server_addon/substancepainter/package.py new file mode 100644 index 0000000000..d445b0059f --- /dev/null +++ b/server_addon/substancepainter/package.py @@ -0,0 +1,3 @@ +name = "substancepainter" +title = "Substance Painter" +version = "0.1.1" diff --git a/server_addon/substancepainter/server/__init__.py b/server_addon/substancepainter/server/__init__.py index 2bf808d508..f6cd51e610 100644 --- a/server_addon/substancepainter/server/__init__.py +++ b/server_addon/substancepainter/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import SubstancePainterSettings, DEFAULT_SPAINTER_SETTINGS class SubstancePainterAddon(BaseServerAddon): - name = "substancepainter" - title = "Substance Painter" - version = __version__ settings_model: Type[SubstancePainterSettings] = SubstancePainterSettings async def get_default_settings(self): diff --git a/server_addon/substancepainter/server/settings/load_plugins.py b/server_addon/substancepainter/server/settings/load_plugins.py new file mode 100644 index 0000000000..e6b2fd86c3 --- /dev/null +++ b/server_addon/substancepainter/server/settings/load_plugins.py @@ -0,0 +1,122 @@ +from ayon_server.settings import BaseSettingsModel, SettingsField + + +def normal_map_format_enum(): + return [ + {"label": "DirectX", "value": "NormalMapFormat.DirectX"}, + {"label": "OpenGL", "value": "NormalMapFormat.OpenGL"}, + ] + + +def tangent_space_enum(): + return [ + {"label": "Per Fragment", "value": "TangentSpace.PerFragment"}, + {"label": "Per Vertex", "value": "TangentSpace.PerVertex"}, + ] + + +def uv_workflow_enum(): + return [ + {"label": "Default", "value": "ProjectWorkflow.Default"}, + {"label": "UV Tile", "value": "ProjectWorkflow.UVTile"}, + {"label": "Texture Set Per UV Tile", + "value": "ProjectWorkflow.TextureSetPerUVTile"} + ] + + +def document_resolution_enum(): + return [ + {"label": "128", "value": 128}, + {"label": "256", "value": 256}, + {"label": "512", "value": 512}, + {"label": "1024", "value": 1024}, + {"label": "2048", "value": 2048}, + {"label": "4096", "value": 4096} + ] + + +class ProjectTemplatesModel(BaseSettingsModel): + _layout = "expanded" + name: str = SettingsField("default", title="Template Name") + default_texture_resolution: int = SettingsField( + 1024, enum_resolver=document_resolution_enum, + title="Document Resolution", + description=("Set texture resolution when " + "creating new project.") + ) + import_cameras: bool = SettingsField( + True, title="Import Cameras", + description="Import cameras from the mesh file.") + normal_map_format: str = SettingsField( + "DirectX", enum_resolver=normal_map_format_enum, + title="Normal Map Format", + description=("Set normal map format when " + "creating new project.") + ) + project_workflow: str = SettingsField( + "Default", enum_resolver=uv_workflow_enum, + title="UV Tile Settings", + description=("Set UV workflow when " + "creating new project.") + ) + tangent_space_mode: str = SettingsField( + "PerFragment", enum_resolver=tangent_space_enum, + title="Tangent Space", + description=("An option to compute tangent space " + "when creating new project.") + ) + preserve_strokes: bool = SettingsField( + True, title="Preserve Strokes", + description=("Preserve strokes positions on mesh.\n" + "(only relevant when loading into " + "existing project)") + ) + + +class ProjectTemplateSettingModel(BaseSettingsModel): + project_templates: 
list[ProjectTemplatesModel] = SettingsField( + default_factory=list, + title="Project Templates" + ) + + +class LoadersModel(BaseSettingsModel): + SubstanceLoadProjectMesh: ProjectTemplateSettingModel = SettingsField( + default_factory=ProjectTemplateSettingModel, + title="Load Mesh" + ) + + +DEFAULT_LOADER_SETTINGS = { + "SubstanceLoadProjectMesh": { + "project_templates": [ + { + "name": "2K(Default)", + "default_texture_resolution": 2048, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.DirectX", + "project_workflow": "ProjectWorkflow.Default", + "tangent_space_mode": "TangentSpace.PerFragment", + "preserve_strokes": True + }, + { + "name": "2K(UV tile)", + "default_texture_resolution": 2048, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.DirectX", + "project_workflow": "ProjectWorkflow.UVTile", + "tangent_space_mode": "TangentSpace.PerFragment", + "preserve_strokes": True + }, + { + "name": "4K(Custom)", + "default_texture_resolution": 4096, + "import_cameras": True, + "normal_map_format": "NormalMapFormat.OpenGL", + "project_workflow": "ProjectWorkflow.UVTile", + "tangent_space_mode": "TangentSpace.PerFragment", + "preserve_strokes": True + } + ] + } +} diff --git a/server_addon/substancepainter/server/settings/main.py b/server_addon/substancepainter/server/settings/main.py index f80fa9fe1e..93523fd650 100644 --- a/server_addon/substancepainter/server/settings/main.py +++ b/server_addon/substancepainter/server/settings/main.py @@ -1,5 +1,6 @@ from ayon_server.settings import BaseSettingsModel, SettingsField from .imageio import ImageIOSettings, DEFAULT_IMAGEIO_SETTINGS +from .load_plugins import LoadersModel, DEFAULT_LOADER_SETTINGS class ShelvesSettingsModel(BaseSettingsModel): @@ -17,9 +18,12 @@ class SubstancePainterSettings(BaseSettingsModel): default_factory=list, title="Shelves" ) + load: LoadersModel = SettingsField( + default_factory=LoadersModel, title="Loaders") DEFAULT_SPAINTER_SETTINGS = { "imageio": DEFAULT_IMAGEIO_SETTINGS, - "shelves": [] + "shelves": [], + "load": DEFAULT_LOADER_SETTINGS, } diff --git a/server_addon/substancepainter/server/version.py b/server_addon/substancepainter/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/substancepainter/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0" diff --git a/server_addon/timers_manager/package.py b/server_addon/timers_manager/package.py new file mode 100644 index 0000000000..bd6b81b4b7 --- /dev/null +++ b/server_addon/timers_manager/package.py @@ -0,0 +1,3 @@ +name = "timers_manager" +title = "Timers Manager" +version = "0.1.1" diff --git a/server_addon/timers_manager/server/__init__.py b/server_addon/timers_manager/server/__init__.py index 29f9d47370..32e83d295c 100644 --- a/server_addon/timers_manager/server/__init__.py +++ b/server_addon/timers_manager/server/__init__.py @@ -2,12 +2,8 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import TimersManagerSettings class TimersManagerAddon(BaseServerAddon): - name = "timers_manager" - version = __version__ - title = "Timers Manager" settings_model: Type[TimersManagerSettings] = TimersManagerSettings diff --git a/server_addon/timers_manager/server/version.py b/server_addon/timers_manager/server/version.py deleted file mode 100644 index 485f44ac21..0000000000 --- a/server_addon/timers_manager/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.1" diff --git
a/server_addon/traypublisher/package.py b/server_addon/traypublisher/package.py new file mode 100644 index 0000000000..4ca8ae9fd3 --- /dev/null +++ b/server_addon/traypublisher/package.py @@ -0,0 +1,3 @@ +name = "traypublisher" +title = "TrayPublisher" +version = "0.1.4" diff --git a/server_addon/traypublisher/server/__init__.py b/server_addon/traypublisher/server/__init__.py index e6f079609f..830f325ac0 100644 --- a/server_addon/traypublisher/server/__init__.py +++ b/server_addon/traypublisher/server/__init__.py @@ -1,14 +1,9 @@ from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import TraypublisherSettings, DEFAULT_TRAYPUBLISHER_SETTING class Traypublisher(BaseServerAddon): - name = "traypublisher" - title = "TrayPublisher" - version = __version__ - settings_model = TraypublisherSettings async def get_default_settings(self): diff --git a/server_addon/traypublisher/server/settings/creator_plugins.py b/server_addon/traypublisher/server/settings/creator_plugins.py index bf66d9a088..1ff14002aa 100644 --- a/server_addon/traypublisher/server/settings/creator_plugins.py +++ b/server_addon/traypublisher/server/settings/creator_plugins.py @@ -1,4 +1,7 @@ +from pydantic import validator from ayon_server.settings import BaseSettingsModel, SettingsField +from ayon_server.settings.validators import ensure_unique_names +from ayon_server.exceptions import BadRequestException class BatchMovieCreatorPlugin(BaseSettingsModel): @@ -22,11 +25,139 @@ class BatchMovieCreatorPlugin(BaseSettingsModel): ) +class ColumnItemModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
+ + name: str = SettingsField( + title="Name", + default="" + ) + + type: str = SettingsField( + title="Type", + default="" + ) + + default: str = SettingsField( + title="Default", + default="" + ) + + required_column: bool = SettingsField( + title="Required Column", + default=False + ) + + validation_pattern: str = SettingsField( + title="Validation Regex Pattern", + default="^(.*)$" + ) + + +class ColumnConfigModel(BaseSettingsModel): + """Configuration of the columns expected in the ingested CSV file."""
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + csv_delimiter: str = SettingsField( + title="CSV delimiter", + default="," + ) + + columns: list[ColumnItemModel] = SettingsField( + title="Columns", + default_factory=list + ) + + @validator("columns") + def validate_unique_outputs(cls, value): + ensure_unique_names(value) + return value + + +class RepresentationItemModel(BaseSettingsModel): + """Allows to publish multiple video files in one go. + + Name of matching asset is parsed from file names + ('asset.mov', 'asset_v001.mov', 'my_asset_to_publish.mov') + """ + + name: str = SettingsField( + title="Name", + default="" + ) + + extensions: list[str] = SettingsField( + title="Extensions", + default_factory=list + ) + + @validator("extensions") + def validate_extension(cls, value): + for ext in value: + if not ext.startswith("."): + raise BadRequestException(f"Extension must start with '.': {ext}") + return value + + +class RepresentationConfigModel(BaseSettingsModel): + """Allows to publish multiple video files in one go.
+ + tags_delimiter: str = SettingsField( + title="Tags delimiter", + default=";" + ) + + default_tags: list[str] = SettingsField( + title="Default tags", + default_factory=list + ) + + representations: list[RepresentationItemModel] = SettingsField( + title="Representations", + default_factory=list + ) + + @validator("representations") + def validate_unique_representations(cls, value): + ensure_unique_names(value) + return value + + +class IngestCSVPluginModel(BaseSettingsModel): + """Settings for the CSV ingest creator, which publishes the products + described by the rows of a CSV file. + """
Name of matching + asset is parsed from file names ('asset.mov', 'asset_v001.mov', + 'my_asset_to_publish.mov')""" + + enabled: bool = SettingsField( + title="Enabled", + default=False + ) + + columns_config: ColumnConfigModel = SettingsField( + title="Columns config", + default_factory=ColumnConfigModel + ) + + representations_config: RepresentationConfigModel = SettingsField( + title="Representations config", + default_factory=RepresentationConfigModel + ) + + class TrayPublisherCreatePluginsModel(BaseSettingsModel): BatchMovieCreator: BatchMovieCreatorPlugin = SettingsField( title="Batch Movie Creator", default_factory=BatchMovieCreatorPlugin ) + IngestCSV: IngestCSVPluginModel = SettingsField( + title="Ingest CSV", + default_factory=IngestCSVPluginModel + ) DEFAULT_CREATORS = { @@ -41,4 +172,170 @@ DEFAULT_CREATORS = { ".mov" ] }, + "IngestCSV": { + "enabled": True, + "columns_config": { + "csv_delimiter": ",", + "columns": [ + { + "name": "File Path", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^([a-z0-9#._\\/]*)$" + }, + { + "name": "Folder Path", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^([a-zA-Z0-9_\\/]*)$" + }, + { + "name": "Task Name", + "type": "text", + "default": "", + "required_column": True, + "validation_pattern": "^(.*)$" + }, + { + "name": "Product Type", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Variant", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Version", + "type": "number", + "default": 1, + "required_column": True, + "validation_pattern": "^(\\d{1,3})$" + }, + { + "name": "Version Comment", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Version Thumbnail", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^([a-zA-Z0-9#._\\/]*)$" + }, + { + "name": "Frame Start", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" + }, + { + "name": "Frame End", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d{1,8})$" + }, + { + "name": "Handle Start", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d)$" + }, + { + "name": "Handle End", + "type": "number", + "default": 0, + "required_column": True, + "validation_pattern": "^(\\d)$" + }, + { + "name": "FPS", + "type": "decimal", + "default": 0.0, + "required_column": True, + "validation_pattern": "^[0-9]*\\.[0-9]+$|^[0-9]+$" + }, + { + "name": "Slate Exists", + "type": "bool", + "default": True, + "required_column": False, + "validation_pattern": "(True|False)" + }, + { + "name": "Representation", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Representation Colorspace", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + }, + { + "name": "Representation Tags", + "type": "text", + "default": "", + "required_column": False, + "validation_pattern": "^(.*)$" + } + ] + }, + "representations_config": { + "tags_delimiter": ";", + "default_tags": [ + "review" + ], + "representations": [ + { + "name": "preview", + "extensions": [ + ".mp4", + ".mov" + ] + }, + { + "name": "exr", + "extensions": [ + ".exr" + ] + }, + { + "name": "edit", + "extensions": [ + 
".mov" + ] + }, + { + "name": "review", + "extensions": [ + ".mov" + ] + }, + { + "name": "nuke", + "extensions": [ + ".nk" + ] + } + ] + } + } } diff --git a/server_addon/traypublisher/server/settings/simple_creators.py b/server_addon/traypublisher/server/settings/simple_creators.py index 924eeedd23..6b979bbe52 100644 --- a/server_addon/traypublisher/server/settings/simple_creators.py +++ b/server_addon/traypublisher/server/settings/simple_creators.py @@ -142,6 +142,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".tiff", @@ -165,6 +166,7 @@ DEFAULT_SIMPLE_CREATORS = [ "extensions": [ ".exr", ".png", + ".dng", ".dpx", ".jpg", ".jpeg", @@ -215,6 +217,7 @@ DEFAULT_SIMPLE_CREATORS = [ ".exr", ".jpg", ".jpeg", + ".dng", ".dpx", ".bmp", ".tif", diff --git a/server_addon/traypublisher/server/version.py b/server_addon/traypublisher/server/version.py deleted file mode 100644 index e57ad00718..0000000000 --- a/server_addon/traypublisher/server/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring addon version.""" -__version__ = "0.1.3" diff --git a/server_addon/tvpaint/package.py b/server_addon/tvpaint/package.py new file mode 100644 index 0000000000..2be3164f4a --- /dev/null +++ b/server_addon/tvpaint/package.py @@ -0,0 +1,3 @@ +name = "tvpaint" +title = "TVPaint" +version = "0.1.2" diff --git a/server_addon/tvpaint/server/__init__.py b/server_addon/tvpaint/server/__init__.py index 033d7d3792..658dcf0bb6 100644 --- a/server_addon/tvpaint/server/__init__.py +++ b/server_addon/tvpaint/server/__init__.py @@ -2,14 +2,10 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import TvpaintSettings, DEFAULT_VALUES class TvpaintAddon(BaseServerAddon): - name = "tvpaint" - title = "TVPaint" - version = __version__ settings_model: Type[TvpaintSettings] = TvpaintSettings async def get_default_settings(self): diff --git a/server_addon/tvpaint/server/settings/main.py b/server_addon/tvpaint/server/settings/main.py index c6b6c9ab12..f20e9ecc9c 100644 --- a/server_addon/tvpaint/server/settings/main.py +++ b/server_addon/tvpaint/server/settings/main.py @@ -1,7 +1,6 @@ from ayon_server.settings import ( BaseSettingsModel, SettingsField, - ensure_unique_names, ) from .imageio import TVPaintImageIOModel diff --git a/server_addon/tvpaint/server/settings/publish_plugins.py b/server_addon/tvpaint/server/settings/publish_plugins.py index 0d978e5714..db1c7bd11a 100644 --- a/server_addon/tvpaint/server/settings/publish_plugins.py +++ b/server_addon/tvpaint/server/settings/publish_plugins.py @@ -1,5 +1,5 @@ from ayon_server.settings import BaseSettingsModel, SettingsField -from ayon_server.types import ColorRGBA_uint8, ColorRGB_uint8 +from ayon_server.types import ColorRGBA_uint8 class CollectRenderInstancesModel(BaseSettingsModel): diff --git a/server_addon/tvpaint/server/version.py b/server_addon/tvpaint/server/version.py deleted file mode 100644 index b3f4756216..0000000000 --- a/server_addon/tvpaint/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.2" diff --git a/server_addon/unreal/package.py b/server_addon/unreal/package.py new file mode 100644 index 0000000000..cab89ca873 --- /dev/null +++ b/server_addon/unreal/package.py @@ -0,0 +1,3 @@ +name = "unreal" +title = "Unreal" +version = "0.1.0" diff --git a/server_addon/unreal/server/__init__.py b/server_addon/unreal/server/__init__.py index a5f3e9597d..751560b623 100644 --- 
diff --git a/server_addon/unreal/server/__init__.py b/server_addon/unreal/server/__init__.py index a5f3e9597d..751560b623 100644 --- a/server_addon/unreal/server/__init__.py +++ b/server_addon/unreal/server/__init__.py @@ -2,17 +2,11 @@ from typing import Type from ayon_server.addons import BaseServerAddon -from .version import __version__ from .settings import UnrealSettings, DEFAULT_VALUES class UnrealAddon(BaseServerAddon): - name = "unreal" - title = "Unreal" - version = __version__ settings_model: Type[UnrealSettings] = UnrealSettings - frontend_scopes = {} - services = {} async def get_default_settings(self): settings_model_cls = self.get_settings_model() diff --git a/server_addon/unreal/server/version.py b/server_addon/unreal/server/version.py deleted file mode 100644 index 3dc1f76bc6..0000000000 --- a/server_addon/unreal/server/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.1.0"
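The unreal hunks above complete a pattern repeated across this diff: each addon's `name`, `title` and `version` move out of the server class (and out of the now-deleted `server/version.py`) into a standalone `package.py`. Sketched as a generic skeleton, the resulting layout looks roughly like this (illustrative names only; the exact attributes `ayon_server` reads from `package.py` are defined by the server, not by this snippet):

```python
# server_addon/<addon>/package.py -- the one place addon metadata now lives.
name = "my_addon"        # hypothetical addon identifier
title = "My Addon"
version = "0.1.0"

# server_addon/<addon>/server/__init__.py -- the class no longer declares
# name/title/version; it only wires up its settings model.
from typing import Type

from ayon_server.addons import BaseServerAddon

from .settings import MyAddonSettings  # hypothetical settings model


class MyAddon(BaseServerAddon):
    settings_model: Type[MyAddonSettings] = MyAddonSettings
```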
diff --git a/tools/manage.ps1 b/tools/manage.ps1 new file mode 100755 index 0000000000..23c52d57be --- /dev/null +++ b/tools/manage.ps1 @@ -0,0 +1,283 @@ +<# +.SYNOPSIS + Helper script to run various tasks on the ayon-core addon repository. + +.DESCRIPTION + This script detects the Python installation and uses Poetry to create and + update the repository virtual environment. That environment is then used + for development tasks such as linting with Ruff, spell checking with + codespell and installing the pre-commit hooks. + +.EXAMPLE + +PS> .\tools\manage.ps1 + +.EXAMPLE + +To create virtual environment using Poetry: +PS> .\tools\manage.ps1 create-env + +.EXAMPLE + +To run Ruff check: +PS> .\tools\manage.ps1 ruff-check + +.LINK +https://github.com/ynput/ayon-core + +#> + +# Settings and gitmodule init +$CurrentDir = Get-Location +$ScriptDir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$RepoRoot = (Get-Item $ScriptDir).parent.FullName +& git submodule update --init --recursive +$env:PSModulePath = $env:PSModulePath + ";$($RepoRoot)\tools\modules\powershell" + +$FunctionName=$ARGS[0] +$Arguments=@() +if ($ARGS.Length -gt 1) { + $Arguments = $ARGS[1..($ARGS.Length - 1)] +} + +function Exit-WithCode($exitcode) { + # Only exit this host process if it's a child of another PowerShell parent process... + $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId + $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name + if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) } + + exit $exitcode +} + +function Test-CommandExists { + param ( + [Parameter(Mandatory=$true)] + [string]$command + ) + + $commandExists = $null -ne (Get-Command $command -ErrorAction SilentlyContinue) + return $commandExists +} + +function Write-Info { + <# + .SYNOPSIS + Write-Info function to write information messages. + + It uses Write-Color if that is available, otherwise falls back to Write-Host. + + #> + [CmdletBinding()] + param ( + [alias ('T')] [String[]]$Text, + [alias ('C', 'ForegroundColor', 'FGC')] [ConsoleColor[]]$Color = [ConsoleColor]::White, + [alias ('B', 'BGC')] [ConsoleColor[]]$BackGroundColor = $null, + [alias ('Indent')][int] $StartTab = 0, + [int] $LinesBefore = 0, + [int] $LinesAfter = 0, + [int] $StartSpaces = 0, + [alias ('L')] [string] $LogFile = '', + [Alias('DateFormat', 'TimeFormat')][string] $DateTimeFormat = 'yyyy-MM-dd HH:mm:ss', + [alias ('LogTimeStamp')][bool] $LogTime = $true, + [int] $LogRetry = 2, + [ValidateSet('unknown', 'string', 'unicode', 'bigendianunicode', 'utf8', 'utf7', 'utf32', 'ascii', 'default', 'oem')][string]$Encoding = 'Unicode', + [switch] $ShowTime, + [switch] $NoNewLine + ) + if (Test-CommandExists "Write-Color") { + Write-Color -Text $Text -Color $Color -BackGroundColor $BackGroundColor -StartTab $StartTab -LinesBefore $LinesBefore -LinesAfter $LinesAfter -StartSpaces $StartSpaces -LogFile $LogFile -DateTimeFormat $DateTimeFormat -LogTime $LogTime -LogRetry $LogRetry -Encoding $Encoding -ShowTime:$ShowTime -NoNewLine:$NoNewLine + } else { + $message = $Text -join ' ' + if ($NoNewLine) + { + Write-Host $message -NoNewline + } + else + { + Write-Host $message + } + } +} + +$art = @" + + ▄██▄ + ▄███▄ ▀██▄ ▀██▀ ▄██▀ ▄██▀▀▀██▄ ▀███▄ █▄ + ▄▄ ▀██▄ ▀██▄ ▄██▀ ██▀ ▀██▄ ▄ ▀██▄ ███ + ▄██▀ ██▄ ▀ ▄▄ ▀ ██ ▄██ ███ ▀██▄ ███ + ▄██▀ ▀██▄ ██ ▀██▄ ▄██▀ ███ ▀██ ▀█▀ + ▄██▀ ▀██▄ ▀█ ▀██▄▄▄▄██▀ █▀ ▀██▄ + + · · - =[ by YNPUT ]:[ http://ayon.ynput.io ]= - · · + +"@ + +function Write-AsciiArt() { + Write-Host $art -ForegroundColor DarkGreen +} + +function Show-PSWarning() { + if ($PSVersionTable.PSVersion.Major -lt 7) { + Write-Info -Text "!!! ", "You are using old version of PowerShell - ", "$($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)" -Color Red, Yellow, White + Write-Info -Text " Please update to at least 7.0 - ", "https://github.com/PowerShell/PowerShell/releases" -Color Yellow, White + Exit-WithCode 1 + } +} + +function Install-Poetry() { + Write-Info -Text ">>> ", "Installing Poetry ... " -Color Green, Gray + $python = "python" + if (Get-Command "pyenv" -ErrorAction SilentlyContinue) { + if (-not (Test-Path -PathType Leaf -Path "$($RepoRoot)\.python-version")) { + $result = & pyenv global + if ($result -eq "no global version configured") { + Write-Info -Text "!!! ", "Using pyenv but no local or global Python version is set." -Color Red, Yellow + Exit-WithCode 1 + } + } + $python = & pyenv which python + + } + + $env:POETRY_HOME="$RepoRoot\.poetry" + (Invoke-WebRequest -Uri https://install.python-poetry.org/ -UseBasicParsing).Content | & $($python) - +} + +function Set-Cwd() { + Set-Location -Path $RepoRoot +} + +function Restore-Cwd() { + $tmp_current_dir = Get-Location + if ("$tmp_current_dir" -ne "$CurrentDir") { + Write-Info -Text ">>> ", "Restoring current directory" -Color Green, Gray + Set-Location -Path $CurrentDir + } +} + +function Initialize-Environment { + Write-Info -Text ">>> ", "Reading Poetry ... " -Color Green, Gray -NoNewline + if (-not(Test-Path -PathType Container -Path "$( $env:POETRY_HOME )\bin")) + { + Write-Info -Text "NOT FOUND" -Color Yellow + Install-Poetry + Write-Info -Text "INSTALLED" -Color Cyan + } + else + { + Write-Info -Text "OK" -Color Green + } + + if (-not(Test-Path -PathType Leaf -Path "$( $RepoRoot )\poetry.lock")) + { + Write-Info -Text ">>> ", "Installing virtual environment and creating lock." -Color Green, Gray + } + else + { + Write-Info -Text ">>> ", "Installing virtual environment from lock." -Color Green, Gray + } + $startTime = [int][double]::Parse((Get-Date -UFormat %s)) + & "$env:POETRY_HOME\bin\poetry" config virtualenvs.in-project true --local + & "$env:POETRY_HOME\bin\poetry" config virtualenvs.create true --local + & "$env:POETRY_HOME\bin\poetry" install --no-root $poetry_verbosity --ansi + if ($LASTEXITCODE -ne 0) + { + Write-Info -Text "!!! ", "Poetry command failed." -Color Red, Yellow + Restore-Cwd + Exit-WithCode 1 + } + if (Test-Path -PathType Container -Path "$( $RepoRoot )\.git") + { + Write-Info -Text ">>> ", "Installing pre-commit hooks ..." -Color Green, White + & "$env:POETRY_HOME\bin\poetry" run pre-commit install + if ($LASTEXITCODE -ne 0) + { + Write-Info -Text "!!! ", "Installation of pre-commit hooks failed." -Color Red, Yellow + } + } + $endTime = [int][double]::Parse((Get-Date -UFormat %s)) + Restore-Cwd + try + { + if (Test-CommandExists "New-BurntToastNotification") + { + $app_logo = "$RepoRoot\tools\icons\ayon.ico" + New-BurntToastNotification -AppLogo "$app_logo" -Text "AYON", "Virtual environment created.", "All done in $( $endTime - $startTime ) secs." + } + } + catch {} + Write-Info -Text ">>> ", "Virtual environment created." -Color Green, White +} + +function Invoke-Ruff { + param ( + [switch] $Fix + ) + $Poetry = "$RepoRoot\.poetry\bin\poetry.exe" + $RuffArgs = @( "run", "ruff", "check" ) + if ($Fix) { + $RuffArgs += "--fix" + } + & $Poetry $RuffArgs +} + +function Invoke-Codespell { + param ( + [switch] $Fix + ) + $Poetry = "$RepoRoot\.poetry\bin\poetry.exe" + $CodespellArgs = @( "run", "codespell" ) + if ($Fix) { + $CodespellArgs += "--write-changes" + } + & $Poetry $CodespellArgs +} + +function Write-Help { + <# + .SYNOPSIS + Write-Help function to write help messages.
+ #> + Write-Host "" + Write-Host "AYON Addon management script" + Write-Host "" + Write-Info -Text "Usage: ", "./manage.ps1 ", "[command]" -Color Gray, White, Cyan + Write-Host "" + Write-Host "Commands:" + Write-Info -Text " create-env ", "Install Poetry and update venv by lock file" -Color White, Cyan + Write-Info -Text " ruff-check ", "Run Ruff check for the repository" -Color White, Cyan + Write-Info -Text " ruff-fix ", "Run Ruff fix for the repository" -Color White, Cyan + Write-Info -Text " codespell ", "Run codespell check for the repository" -Color White, Cyan + Write-Host "" +} + +function Resolve-Function { + if ($null -eq $FunctionName) { + Write-Help + return + } + $FunctionName = $FunctionName.ToLower() -replace "\W" + if ($FunctionName -eq "createenv") { + Set-Cwd + Initialize-Environment + } elseif ($FunctionName -eq "ruffcheck") { + Set-Cwd + Invoke-Ruff + } elseif ($FunctionName -eq "rufffix") { + Set-Cwd + Invoke-Ruff -Fix + } elseif ($FunctionName -eq "codespell") { + Set-Cwd + Invoke-Codespell + } else { + Write-Host "Unknown function ""$FunctionName""" + Write-Help + } +} + +# ----------------------------------------------------- + +Show-PSWarning +Write-AsciiArt + +Resolve-Function diff --git a/tools/manage.sh b/tools/manage.sh new file mode 100755 index 0000000000..923953bf96 --- /dev/null +++ b/tools/manage.sh @@ -0,0 +1,222 @@ +#!/usr/bin/env bash + +# Colors for terminal + +RST='\033[0m' # Text Reset + +# Regular Colors +Black='\033[0;30m' # Black +Red='\033[0;31m' # Red +Green='\033[0;32m' # Green +Yellow='\033[0;33m' # Yellow +Blue='\033[0;34m' # Blue +Purple='\033[0;35m' # Purple +Cyan='\033[0;36m' # Cyan +White='\033[0;37m' # White + +# Bold +BBlack='\033[1;30m' # Black +BRed='\033[1;31m' # Red +BGreen='\033[1;32m' # Green +BYellow='\033[1;33m' # Yellow +BBlue='\033[1;34m' # Blue +BPurple='\033[1;35m' # Purple +BCyan='\033[1;36m' # Cyan +BWhite='\033[1;37m' # White + +# Bold High Intensity +BIBlack='\033[1;90m' # Black +BIRed='\033[1;91m' # Red +BIGreen='\033[1;92m' # Green +BIYellow='\033[1;93m' # Yellow +BIBlue='\033[1;94m' # Blue +BIPurple='\033[1;95m' # Purple +BICyan='\033[1;96m' # Cyan +BIWhite='\033[1;97m' # White + + +############################################################################## +# Detect required version of python +# Globals: +# colors +# PYTHON +# Arguments: +# None +# Returns: +# None +############################################################################### +detect_python () { + echo -e "${BIGreen}>>>${RST} Using python \c" + command -v python >/dev/null 2>&1 || { echo -e "${BIRed}- NOT FOUND${RST} ${BIYellow}You need Python 3.9 installed to continue.${RST}"; return 1; } + local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" + local python_version="$(python <<< ${version_command})" + oIFS="$IFS" + IFS=. + set -- $python_version + IFS="$oIFS" + if [ "$1" -ge "3" ] && [ "$2" -ge "9" ] ; then + if [ "$2" -gt "9" ] ; then + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.9.x${RST}"; return 1; + else + echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}" + fi + else + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported, use${RST} ${BIPurple}3.9.x${RST}"; return 1; + fi +} + +install_poetry () { + echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
+ export POETRY_HOME="$repo_root/.poetry" + command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; } + curl -sSL https://install.python-poetry.org/ | python - +} + +############################################################################## +# Return absolute path +# Globals: +# None +# Arguments: +# Path to resolve +# Returns: +# None +############################################################################### +realpath () { + echo $(cd $(dirname "$1"); pwd)/$(basename "$1") +} + +############################################################################## +# Clean Python bytecode and cache files in repository +# Globals: +# repo_root +# Arguments: +# None +# Returns: +# None +############################################################################### +clean_pyc () { + pushd "$repo_root" > /dev/null || return > /dev/null + find . -regex '^.*\(__pycache__\|\.py[co]\)$' -delete + popd > /dev/null || return > /dev/null +} + +############################################################################## +# Create Virtual Environment +# Globals: +# repo_root +# POETRY_HOME +# poetry_verbosity +# Arguments: +# Path to resolve +# Returns: +# None +############################################################################### +create_env () { + # Directories + pushd "$repo_root" > /dev/null || return > /dev/null + + echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c" + if [ -f "$POETRY_HOME/bin/poetry" ]; then + echo -e "${BIGreen}OK${RST}" + else + echo -e "${BIYellow}NOT FOUND${RST}" + install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return 1; } + fi + + if [ -f "$repo_root/poetry.lock" ]; then + echo -e "${BIGreen}>>>${RST} Updating dependencies ..." + else + echo -e "${BIGreen}>>>${RST} Installing dependencies ..." + fi + + "$POETRY_HOME/bin/poetry" install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return 1; } + + echo -e "${BIGreen}>>>${RST} Cleaning cache files ..." + clean_pyc + + "$POETRY_HOME/bin/poetry" run python -m pip install --disable-pip-version-check --force-reinstall pip + + if [ -d "$repo_root/.git" ]; then + echo -e "${BIGreen}>>>${RST} Installing pre-commit hooks ..." + "$POETRY_HOME/bin/poetry" run pre-commit install + fi +} + +print_art() { + echo -e "${BGreen}" + cat <<-EOF + + ▄██▄ + ▄███▄ ▀██▄ ▀██▀ ▄██▀ ▄██▀▀▀██▄ ▀███▄ █▄ + ▄▄ ▀██▄ ▀██▄ ▄██▀ ██▀ ▀██▄ ▄ ▀██▄ ███ + ▄██▀ ██▄ ▀ ▄▄ ▀ ██ ▄██ ███ ▀██▄ ███ + ▄██▀ ▀██▄ ██ ▀██▄ ▄██▀ ███ ▀██ ▀█▀ + ▄██▀ ▀██▄ ▀█ ▀██▄▄▄▄██▀ █▀ ▀██▄ + + · · - =[ by YNPUT ]:[ http://ayon.ynput.io ]= - · · + +EOF + echo -e "${RST}" +} + +default_help() { + print_art + echo -e "${BWhite}AYON Addon management script${RST}" + echo "" + echo -e "Usage: ${BWhite}./manage.sh${RST} ${BICyan}[command]${RST}" + echo "" + echo -e "${BWhite}Commands:${RST}" + echo -e " ${BWhite}create-env${RST} ${BCyan}Install Poetry and update venv by lock file${RST}" + echo -e " ${BWhite}ruff-check${RST} ${BCyan}Run Ruff check for the repository${RST}" + echo -e " ${BWhite}ruff-fix${RST} ${BCyan}Run Ruff fix for the repository${RST}" + echo -e " ${BWhite}codespell${RST} ${BCyan}Run codespell check for the repository${RST}" + echo "" +} + +run_ruff_check () { + echo -e "${BIGreen}>>>${RST} Running Ruff check ..." + "$POETRY_HOME/bin/poetry" run ruff check +} + +run_ruff_fix () { + echo -e "${BIGreen}>>>${RST} Running Ruff fix ..." + "$POETRY_HOME/bin/poetry" run ruff check --fix +} + +run_codespell () { + echo -e "${BIGreen}>>>${RST} Running codespell check ..." + "$POETRY_HOME/bin/poetry" run codespell +} + +main () { + detect_python || return 1 + + # Directories + repo_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) + + if [[ -z $POETRY_HOME ]]; then + export POETRY_HOME="$repo_root/.poetry" + fi + + pushd "$repo_root" > /dev/null || return > /dev/null + + # Use first argument, lowercased and stripped to letters only + function_name="$(echo "$1" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z]*//g')" + + case $function_name in + "createenv") + create_env || return_code=$? + exit $return_code + ;; + "ruffcheck") + run_ruff_check || return_code=$? + exit $return_code + ;; + "rufffix") + run_ruff_fix || return_code=$? + exit $return_code + ;; + "codespell") + run_codespell || return_code=$? + exit $return_code + ;; + esac + + if [ "$function_name" != "" ]; then + echo -e "${BIRed}!!!${RST} Unknown function name: $function_name" + fi + + default_help + exit $return_code +} + +return_code=0 +main "$@" || return_code=$? +exit $return_code