Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit ccd50f4608: Merge branch 'develop' into bugfix/OP-7981_resolve_swapping_versions_fix_offset

851 changed files with 17999 additions and 31132 deletions
.github/ISSUE_TEMPLATE/bug_report.yml (vendored): 2 changed lines

@@ -1,6 +1,6 @@
 name: Bug Report
 description: File a bug report
-title: 'Bug: '
+title: 'Your issue title here'
 labels:
   - 'type: bug'
 body:
@@ -1,6 +1,6 @@
 name: Enhancement Request
 description: Create a report to help us enhance a particular feature
-title: "Enhancement: "
+title: "Your issue title here"
 labels:
   - "type: enhancement"
 body:

@@ -49,4 +49,4 @@ body:
       label: "Additional context:"
       description: Add any other context or screenshots about the enhancement request here.
     validations:
-      required: false
+      required: false
.github/workflows/issue_to_clickup_trigger.yml (vendored, new file): 28 lines

@@ -0,0 +1,28 @@
+name: Sync Issues to ClickUp [trigger]
+
+on:
+  workflow_dispatch:
+    inputs:
+      issue-number:
+        required: true
+  issues:
+    types: [labeled]
+
+
+jobs:
+  call-ci-tools-issue-sync:
+    if: github.event.inputs.issue-number != '' || github.event_name == 'issues' && contains(github.event.issue.labels.*.name, 'backlog')
+    uses: ynput/ci-tools/.github/workflows/issue_to_clickup_ref.yml@main
+    with:
+      # issue number should be taken either from inputs or from the event
+      issue-number: ${{ github.event.inputs.issue-number || github.event.issue.number }}
+      repo-owner: ${{ github.event.repository.owner.login }}
+      repo-name: ${{ github.event.repository.name }}
+    secrets:
+      token: ${{ secrets.YNPUT_BOT_TOKEN }}
+      cu_api_key: ${{ secrets.CLICKUP_API_KEY }}
+      cu_team_id: ${{ secrets.CLICKUP_TEAM_ID }}
+      cu_folder_id: ${{ secrets.CLICKUP_FOLDER_ID }}
+      cu_list_id: ${{ secrets.CLICKUP_LIST_ID }}
+      cu_field_domain_id: ${{ secrets.CLICKUP_DOMAIN_FIELD_ID }}
+      cu_field_type_id: ${{ secrets.CLICKUP_ISSUETYPE_FIELD_ID }}
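A note on the `if:` expression in this workflow: in GitHub Actions expressions `&&` binds tighter than `||`, so the job runs either when an `issue-number` input was supplied via `workflow_dispatch`, or when an `issues` event fires for an issue carrying the `backlog` label. Assuming a repository with the GitHub CLI configured, the manual path can be exercised with `gh workflow run issue_to_clickup_trigger.yml -f issue-number=123` (the issue number here is illustrative).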
@@ -1,12 +1,16 @@
 import os
 from .version import __version__


 AYON_CORE_ROOT = os.path.dirname(os.path.abspath(__file__))

-# TODO remove after '1.x.x'
+# -------------------------
+# DEPRECATED - Remove before '1.x.x' release
+# -------------------------
 PACKAGE_DIR = AYON_CORE_ROOT
 PLUGINS_DIR = os.path.join(AYON_CORE_ROOT, "plugins")
 AYON_SERVER_ENABLED = True

-USE_AYON_ENTITIES = False
+# Indicate if AYON entities should be used instead of OpenPype entities
+USE_AYON_ENTITIES = True
+# -------------------------
@@ -14,14 +14,10 @@ from abc import ABCMeta, abstractmethod

 import six
-import appdirs
+import ayon_api

-from ayon_core.lib import Logger
-from ayon_core.client import get_ayon_server_api_connection
-from ayon_core.settings import get_system_settings
-from ayon_core.settings.ayon_settings import (
-    is_dev_mode_enabled,
-    get_ayon_settings,
-)
+from ayon_core.lib import Logger, is_dev_mode_enabled
+from ayon_core.settings import get_studio_settings

 from .interfaces import (
     IPluginPaths,
@@ -151,8 +147,7 @@ def load_addons(force=False):


 def _get_ayon_bundle_data():
-    con = get_ayon_server_api_connection()
-    bundles = con.get_bundles()["bundles"]
+    bundles = ayon_api.get_bundles()["bundles"]

     bundle_name = os.getenv("AYON_BUNDLE_NAME")
@@ -180,8 +175,7 @@ def _get_ayon_addons_information(bundle_info):

     output = []
     bundle_addons = bundle_info["addons"]
-    con = get_ayon_server_api_connection()
-    addons = con.get_addons_info()["addons"]
+    addons = ayon_api.get_addons_info()["addons"]
     for addon in addons:
         name = addon["name"]
         versions = addon.get("versions")
@@ -648,7 +642,6 @@ class AddonsManager:

     def __init__(self, settings=None, initialize=True):
         self._settings = settings
-        self._system_settings = None

         self._addons = []
         self._addons_by_id = {}
@@ -738,14 +731,9 @@ class AddonsManager:
         # Prepare settings for addons
         settings = self._settings
         if settings is None:
-            settings = get_ayon_settings()
+            settings = get_studio_settings()

-        # OpenPype settings
-        system_settings = self._system_settings
-        if system_settings is None:
-            system_settings = get_system_settings()
-
-        modules_settings = system_settings["modules"]
+        modules_settings = {}

         report = {}
         time_start = time.time()
@@ -788,6 +776,7 @@ class AddonsManager:

             addon_classes.append(modules_item)

+        aliased_names = []
         for addon_cls in addon_classes:
             name = addon_cls.__name__
             if issubclass(addon_cls, OpenPypeModule):
@@ -807,6 +796,13 @@ class AddonsManager:
                 self._addons.append(addon)
                 self._addons_by_id[addon.id] = addon
                 self._addons_by_name[addon.name] = addon
+                # NOTE This will be removed with release 1.0.0 of ayon-core,
+                #   please use carefully.
+                # Gives option to use alias name for addon for cases when
+                #   name in OpenPype was not the same as in AYON.
+                name_alias = getattr(addon, "openpype_alias", None)
+                if name_alias:
+                    aliased_names.append((name_alias, addon))
                 enabled_str = "X"
                 if not addon.enabled:
                     enabled_str = " "
@@ -822,6 +818,17 @@ class AddonsManager:
                     exc_info=True
                 )

+        for item in aliased_names:
+            name_alias, addon = item
+            if name_alias not in self._addons_by_name:
+                self._addons_by_name[name_alias] = addon
+                continue
+            self.log.warning(
+                "Alias name '{}' of addon '{}' is already assigned.".format(
+                    name_alias, addon.name
+                )
+            )
+
         if self._report is not None:
             report[self._report_total_key] = time.time() - time_start
             self._report["Initialization"] = report
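The alias bookkeeping above only reads an optional `openpype_alias` attribute from addon instances. A minimal sketch of how an addon could opt in (the base-class import path and the addon itself are illustrative assumptions, not taken from this diff):

    from ayon_core.addon import AYONAddon  # assumed export

    class KitsuAddon(AYONAddon):
        name = "kitsu"
        # Hypothetical alias: lets code that still asks the AddonsManager
        # for the old OpenPype module name resolve to this addon.
        openpype_alias = "kitsu_module"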
@@ -11,6 +11,7 @@ import acre
 from ayon_core import AYON_CORE_ROOT
 from ayon_core.addon import AddonsManager
 from ayon_core.settings import get_general_environments
+from ayon_core.lib import initialize_ayon_connection

 from .cli_commands import Commands
@@ -102,19 +103,18 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup):


 @main_cli.command()
-@click.argument("paths", nargs=-1)
-@click.option("-t", "--targets", help="Targets module", default=None,
+@click.argument("path", required=True)
+@click.option("-t", "--targets", help="Targets", default=None,
               multiple=True)
 @click.option("-g", "--gui", is_flag=True,
               help="Show Publish UI", default=False)
-def publish(paths, targets, gui):
+def publish(path, targets, gui):
     """Start CLI publishing.

-    Publish collects json from paths provided as an argument.
-    More than one path is allowed.
+    Publish collects json from path provided as an argument.
     """

-    Commands.publish(list(paths), targets, gui)
+    Commands.publish(path, targets, gui)


 @main_cli.command(context_settings={"ignore_unknown_options": True})
@@ -243,6 +243,7 @@ def _set_addons_environments():


 def main(*args, **kwargs):
+    initialize_ayon_connection()
     python_path = os.getenv("PYTHONPATH", "")
     split_paths = python_path.split(os.pathsep)
@@ -3,6 +3,7 @@
 import os
 import sys
 import json
+import warnings


 class Commands:
@@ -41,21 +42,21 @@ class Commands:
         return click_func

     @staticmethod
-    def publish(paths, targets=None, gui=False):
+    def publish(path: str, targets: list = None, gui: bool = False) -> None:
         """Start headless publishing.

-        Publish use json from passed paths argument.
+        Publish uses json from passed path argument.

         Args:
-            paths (list): Paths to jsons.
-            targets (string): What module should be targeted
-                (to choose validator for example)
+            path (str): Path to JSON.
+            targets (list of str): List of pyblish targets.
+            gui (bool): Show publish UI.

         Raises:
-            RuntimeError: When there is no path to process.
-        """
+            RuntimeError: When executed with list of JSON paths.

+        """
         from ayon_core.lib import Logger
         from ayon_core.lib.applications import (
             get_app_environments_for_context,
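With the new signature, a headless publish boils down to one call with a single metadata file. A minimal sketch (the import path follows `from .cli_commands import Commands` in cli.py; the JSON path and target name are illustrative):

    from ayon_core.cli_commands import Commands

    # Publish one farm job described by a metadata JSON file, without GUI.
    Commands.publish("/jobs/render_job.json", targets=["farm"], gui=False)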
@@ -73,6 +74,23 @@ class Commands:
         import pyblish.api
         import pyblish.util

+        if not isinstance(path, str):
+            raise RuntimeError("Path to JSON must be a string.")
+
+        # Fix older jobs
+        for src_key, dst_key in (
+            ("AVALON_PROJECT", "AYON_PROJECT_NAME"),
+            ("AVALON_ASSET", "AYON_FOLDER_PATH"),
+            ("AVALON_TASK", "AYON_TASK_NAME"),
+            ("AVALON_WORKDIR", "AYON_WORKDIR"),
+            ("AVALON_APP_NAME", "AYON_APP_NAME"),
+            ("AVALON_APP", "AYON_HOST_NAME"),
+        ):
+            if src_key in os.environ and dst_key not in os.environ:
+                os.environ[dst_key] = os.environ[src_key]
+            # Remove old keys, so we're sure they're not used
+            os.environ.pop(src_key, None)
+
         log = Logger.get_logger("CLI-publish")

         install_ayon_plugins()
@@ -81,18 +99,15 @@ class Commands:

         publish_paths = manager.collect_plugin_paths()["publish"]

-        for path in publish_paths:
-            pyblish.api.register_plugin_path(path)
+        for plugin_path in publish_paths:
+            pyblish.api.register_plugin_path(plugin_path)

-        if not any(paths):
-            raise RuntimeError("No publish paths specified")
-
-        app_full_name = os.getenv("AVALON_APP_NAME")
+        app_full_name = os.getenv("AYON_APP_NAME")
         if app_full_name:
             context = get_global_context()
             env = get_app_environments_for_context(
                 context["project_name"],
-                context["asset_name"],
+                context["folder_path"],
                 context["task_name"],
                 app_full_name,
                 launch_type=LaunchTypes.farm_publish,
@@ -108,7 +123,7 @@ class Commands:
         else:
             pyblish.api.register_target("farm")

-        os.environ["AYON_PUBLISH_DATA"] = os.pathsep.join(paths)
+        os.environ["AYON_PUBLISH_DATA"] = path
         os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

         log.info("Running publish ...")
@@ -167,7 +182,7 @@ class Commands:
             json.dump(env, file_stream, indent=4)

     @staticmethod
-    def contextselection(output_path, project_name, asset_name, strict):
+    def contextselection(output_path, project_name, folder_path, strict):
         from ayon_core.tools.context_dialog import main

-        main(output_path, project_name, asset_name, strict)
+        main(output_path, project_name, folder_path, strict)
@@ -1,110 +0,0 @@
from .utils import get_ayon_server_api_connection

from .entities import (
    get_projects,
    get_project,
    get_whole_project,

    get_asset_by_id,
    get_asset_by_name,
    get_assets,
    get_archived_assets,
    get_asset_ids_with_subsets,

    get_subset_by_id,
    get_subset_by_name,
    get_subsets,
    get_subset_families,

    get_version_by_id,
    get_version_by_name,
    get_versions,
    get_hero_version_by_id,
    get_hero_version_by_subset_id,
    get_hero_versions,
    get_last_versions,
    get_last_version_by_subset_id,
    get_last_version_by_subset_name,
    get_output_link_versions,

    version_is_latest,

    get_representation_by_id,
    get_representation_by_name,
    get_representations,
    get_representation_parents,
    get_representations_parents,
    get_archived_representations,

    get_thumbnail,
    get_thumbnails,
    get_thumbnail_id_from_source,

    get_workfile_info,

    get_asset_name_identifier,
)

from .entity_links import (
    get_linked_asset_ids,
    get_linked_assets,
    get_linked_representation_id,
)

from .operations import (
    create_project,
)


__all__ = (
    "get_ayon_server_api_connection",

    "get_projects",
    "get_project",
    "get_whole_project",

    "get_asset_by_id",
    "get_asset_by_name",
    "get_assets",
    "get_archived_assets",
    "get_asset_ids_with_subsets",

    "get_subset_by_id",
    "get_subset_by_name",
    "get_subsets",
    "get_subset_families",

    "get_version_by_id",
    "get_version_by_name",
    "get_versions",
    "get_hero_version_by_id",
    "get_hero_version_by_subset_id",
    "get_hero_versions",
    "get_last_versions",
    "get_last_version_by_subset_id",
    "get_last_version_by_subset_name",
    "get_output_link_versions",

    "version_is_latest",

    "get_representation_by_id",
    "get_representation_by_name",
    "get_representations",
    "get_representation_parents",
    "get_representations_parents",
    "get_archived_representations",

    "get_thumbnail",
    "get_thumbnails",
    "get_thumbnail_id_from_source",

    "get_workfile_info",

    "get_linked_asset_ids",
    "get_linked_assets",
    "get_linked_representation_id",

    "create_project",

    "get_asset_name_identifier",
)
@@ -1,28 +0,0 @@
# --- Folders ---
DEFAULT_FOLDER_FIELDS = {
    "id",
    "name",
    "path",
    "parentId",
    "active",
    "parents",
    "thumbnailId"
}

REPRESENTATION_FILES_FIELDS = {
    "files.name",
    "files.hash",
    "files.id",
    "files.path",
    "files.size",
}

CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"

File diff suppressed because it is too large.
@@ -1,741 +0,0 @@
import collections

from .constants import CURRENT_THUMBNAIL_SCHEMA
from .utils import get_ayon_server_api_connection
from .openpype_comp import get_folders_with_tasks
from .conversion_utils import (
    project_fields_v3_to_v4,
    convert_v4_project_to_v3,

    folder_fields_v3_to_v4,
    convert_v4_folder_to_v3,

    subset_fields_v3_to_v4,
    convert_v4_subset_to_v3,

    version_fields_v3_to_v4,
    convert_v4_version_to_v3,

    representation_fields_v3_to_v4,
    convert_v4_representation_to_v3,

    workfile_info_fields_v3_to_v4,
    convert_v4_workfile_info_to_v3,
)


def get_asset_name_identifier(asset_doc):
    """Get asset name identifier by asset document.

    This function is added because of the AYON implementation, where the
    name identifier is not just a name but a full path.

    Asset document must have "name" key, and "data.parents" when in AYON
    mode.

    Args:
        asset_doc (dict[str, Any]): Asset document.
    """

    parents = list(asset_doc["data"]["parents"])
    parents.append(asset_doc["name"])
    return "/" + "/".join(parents)
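# A concrete illustration of the path identifier above (values are
# illustrative):
#     asset_doc = {"name": "sh010", "data": {"parents": ["ep01", "sq01"]}}
#     get_asset_name_identifier(asset_doc)  ->  "/ep01/sq01/sh010"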


def get_projects(active=True, inactive=False, library=None, fields=None):
    if not active and not inactive:
        return

    if active and inactive:
        active = None
    elif active:
        active = True
    elif inactive:
        active = False

    con = get_ayon_server_api_connection()
    fields = project_fields_v3_to_v4(fields, con)
    for project in con.get_projects(active, library, fields=fields):
        yield convert_v4_project_to_v3(project)


def get_project(project_name, active=True, inactive=False, fields=None):
    # Skip if both are disabled
    con = get_ayon_server_api_connection()
    fields = project_fields_v3_to_v4(fields, con)
    return convert_v4_project_to_v3(
        con.get_project(project_name, fields=fields)
    )


def get_whole_project(*args, **kwargs):
    raise NotImplementedError("'get_whole_project' not implemented")


def _get_subsets(
    project_name,
    subset_ids=None,
    subset_names=None,
    folder_ids=None,
    names_by_folder_ids=None,
    archived=False,
    fields=None
):
    # Convert fields and add minimum required fields
    con = get_ayon_server_api_connection()
    fields = subset_fields_v3_to_v4(fields, con)
    if fields is not None:
        for key in (
            "id",
            "active"
        ):
            fields.add(key)

    active = True
    if archived:
        active = None

    for subset in con.get_products(
        project_name,
        product_ids=subset_ids,
        product_names=subset_names,
        folder_ids=folder_ids,
        names_by_folder_ids=names_by_folder_ids,
        active=active,
        fields=fields,
    ):
        yield convert_v4_subset_to_v3(subset)


def _get_versions(
    project_name,
    version_ids=None,
    subset_ids=None,
    versions=None,
    hero=True,
    standard=True,
    latest=None,
    active=None,
    fields=None
):
    con = get_ayon_server_api_connection()

    fields = version_fields_v3_to_v4(fields, con)

    # Make sure 'productId' and 'version' are available when hero versions
    #   are queried
    if fields and hero:
        fields = set(fields)
        fields |= {"productId", "version"}

    queried_versions = con.get_versions(
        project_name,
        version_ids=version_ids,
        product_ids=subset_ids,
        versions=versions,
        hero=hero,
        standard=standard,
        latest=latest,
        active=active,
        fields=fields
    )

    version_entities = []
    hero_versions = []
    for version in queried_versions:
        if version["version"] < 0:
            hero_versions.append(version)
        else:
            version_entities.append(convert_v4_version_to_v3(version))

    if hero_versions:
        subset_ids = set()
        versions_nums = set()
        for hero_version in hero_versions:
            versions_nums.add(abs(hero_version["version"]))
            subset_ids.add(hero_version["productId"])

        hero_eq_versions = con.get_versions(
            project_name,
            product_ids=subset_ids,
            versions=versions_nums,
            hero=False,
            fields=["id", "version", "productId"]
        )
        hero_eq_by_subset_id = collections.defaultdict(list)
        for version in hero_eq_versions:
            hero_eq_by_subset_id[version["productId"]].append(version)

        for hero_version in hero_versions:
            abs_version = abs(hero_version["version"])
            subset_id = hero_version["productId"]
            version_id = None
            for version in hero_eq_by_subset_id.get(subset_id, []):
                if version["version"] == abs_version:
                    version_id = version["id"]
                    break
            conv_hero = convert_v4_version_to_v3(hero_version)
            conv_hero["version_id"] = version_id
            version_entities.append(conv_hero)

    return version_entities


def get_asset_by_id(project_name, asset_id, fields=None):
    assets = get_assets(
        project_name, asset_ids=[asset_id], fields=fields
    )
    for asset in assets:
        return asset
    return None


def get_asset_by_name(project_name, asset_name, fields=None):
    assets = get_assets(
        project_name, asset_names=[asset_name], fields=fields
    )
    for asset in assets:
        return asset
    return None


def _folders_query(project_name, con, fields, **kwargs):
    if fields is None or "tasks" in fields:
        folders = get_folders_with_tasks(
            con, project_name, fields=fields, **kwargs
        )

    else:
        folders = con.get_folders(project_name, fields=fields, **kwargs)

    for folder in folders:
        yield folder


def get_assets(
    project_name,
    asset_ids=None,
    asset_names=None,
    parent_ids=None,
    archived=False,
    fields=None
):
    if not project_name:
        return

    active = True
    if archived:
        active = None

    con = get_ayon_server_api_connection()
    fields = folder_fields_v3_to_v4(fields, con)
    kwargs = dict(
        folder_ids=asset_ids,
        parent_ids=parent_ids,
        active=active,
    )
    if not asset_names:
        for folder in _folders_query(project_name, con, fields, **kwargs):
            yield convert_v4_folder_to_v3(folder, project_name)
        return

    new_asset_names = set()
    folder_paths = set()
    for name in asset_names:
        if "/" in name:
            folder_paths.add(name)
        else:
            new_asset_names.add(name)

    yielded_ids = set()
    if folder_paths:
        for folder in _folders_query(
            project_name, con, fields, folder_paths=folder_paths, **kwargs
        ):
            yielded_ids.add(folder["id"])
            yield convert_v4_folder_to_v3(folder, project_name)

    if not new_asset_names:
        return

    for folder in _folders_query(
        project_name, con, fields, folder_names=new_asset_names, **kwargs
    ):
        if folder["id"] not in yielded_ids:
            yielded_ids.add(folder["id"])
            yield convert_v4_folder_to_v3(folder, project_name)


def get_archived_assets(
    project_name,
    asset_ids=None,
    asset_names=None,
    parent_ids=None,
    fields=None
):
    return get_assets(
        project_name,
        asset_ids,
        asset_names,
        parent_ids,
        True,
        fields
    )


def get_asset_ids_with_subsets(project_name, asset_ids=None):
    con = get_ayon_server_api_connection()
    return con.get_folder_ids_with_products(project_name, asset_ids)


def get_subset_by_id(project_name, subset_id, fields=None):
    subsets = get_subsets(
        project_name, subset_ids=[subset_id], fields=fields
    )
    for subset in subsets:
        return subset
    return None


def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
    subsets = get_subsets(
        project_name,
        subset_names=[subset_name],
        asset_ids=[asset_id],
        fields=fields
    )
    for subset in subsets:
        return subset
    return None


def get_subsets(
    project_name,
    subset_ids=None,
    subset_names=None,
    asset_ids=None,
    names_by_asset_ids=None,
    archived=False,
    fields=None
):
    return _get_subsets(
        project_name,
        subset_ids,
        subset_names,
        asset_ids,
        names_by_asset_ids,
        archived,
        fields=fields
    )


def get_subset_families(project_name, subset_ids=None):
    con = get_ayon_server_api_connection()
    return con.get_product_type_names(project_name, subset_ids)


def get_version_by_id(project_name, version_id, fields=None):
    versions = get_versions(
        project_name,
        version_ids=[version_id],
        fields=fields,
        hero=True
    )
    for version in versions:
        return version
    return None


def get_version_by_name(project_name, version, subset_id, fields=None):
    versions = get_versions(
        project_name,
        subset_ids=[subset_id],
        versions=[version],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_versions(
    project_name,
    version_ids=None,
    subset_ids=None,
    versions=None,
    hero=False,
    fields=None
):
    return _get_versions(
        project_name,
        version_ids,
        subset_ids,
        versions,
        hero=hero,
        standard=True,
        fields=fields
    )


def get_hero_version_by_id(project_name, version_id, fields=None):
    versions = get_hero_versions(
        project_name,
        version_ids=[version_id],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_hero_version_by_subset_id(
    project_name, subset_id, fields=None
):
    versions = get_hero_versions(
        project_name,
        subset_ids=[subset_id],
        fields=fields
    )
    for version in versions:
        return version
    return None


def get_hero_versions(
    project_name, subset_ids=None, version_ids=None, fields=None
):
    return _get_versions(
        project_name,
        version_ids=version_ids,
        subset_ids=subset_ids,
        hero=True,
        standard=False,
        fields=fields
    )


def get_last_versions(project_name, subset_ids, active=None, fields=None):
    if fields:
        fields = set(fields)
        fields.add("parent")

    versions = _get_versions(
        project_name,
        subset_ids=subset_ids,
        latest=True,
        hero=False,
        active=active,
        fields=fields
    )
    return {
        version["parent"]: version
        for version in versions
    }


def get_last_version_by_subset_id(project_name, subset_id, fields=None):
    versions = _get_versions(
        project_name,
        subset_ids=[subset_id],
        latest=True,
        hero=False,
        fields=fields
    )
    if not versions:
        return None
    return versions[0]


def get_last_version_by_subset_name(
    project_name,
    subset_name,
    asset_id=None,
    asset_name=None,
    fields=None
):
    if not asset_id and not asset_name:
        return None

    if not asset_id:
        asset = get_asset_by_name(
            project_name, asset_name, fields=["_id"]
        )
        if not asset:
            return None
        asset_id = asset["_id"]

    subset = get_subset_by_name(
        project_name, subset_name, asset_id, fields=["_id"]
    )
    if not subset:
        return None
    return get_last_version_by_subset_id(
        project_name, subset["_id"], fields=fields
    )


def get_output_link_versions(project_name, version_id, fields=None):
    if not version_id:
        return []

    con = get_ayon_server_api_connection()
    version_links = con.get_version_links(
        project_name, version_id, link_direction="out")

    version_ids = {
        link["entityId"]
        for link in version_links
        if link["entityType"] == "version"
    }
    if not version_ids:
        return []

    return get_versions(project_name, version_ids=version_ids, fields=fields)


def version_is_latest(project_name, version_id):
    con = get_ayon_server_api_connection()
    return con.version_is_latest(project_name, version_id)


def get_representation_by_id(project_name, representation_id, fields=None):
    representations = get_representations(
        project_name,
        representation_ids=[representation_id],
        fields=fields
    )
    for representation in representations:
        return representation
    return None


def get_representation_by_name(
    project_name, representation_name, version_id, fields=None
):
    representations = get_representations(
        project_name,
        representation_names=[representation_name],
        version_ids=[version_id],
        fields=fields
    )
    for representation in representations:
        return representation
    return None


def get_representations(
    project_name,
    representation_ids=None,
    representation_names=None,
    version_ids=None,
    context_filters=None,
    names_by_version_ids=None,
    archived=False,
    standard=True,
    fields=None
):
    if context_filters is not None:
        # TODO should we add the support?
        # - there was an ability to filter using regex
        raise ValueError("OP v4 can't filter by representation context.")

    if not archived and not standard:
        return

    if archived and not standard:
        active = False
    elif not archived and standard:
        active = True
    else:
        active = None

    con = get_ayon_server_api_connection()
    fields = representation_fields_v3_to_v4(fields, con)
    if fields and active is not None:
        fields.add("active")

    representations = con.get_representations(
        project_name,
        representation_ids=representation_ids,
        representation_names=representation_names,
        version_ids=version_ids,
        names_by_version_ids=names_by_version_ids,
        active=active,
        fields=fields
    )
    for representation in representations:
        yield convert_v4_representation_to_v3(representation)


def get_representation_parents(project_name, representation):
    if not representation:
        return None

    repre_id = representation["_id"]
    parents_by_repre_id = get_representations_parents(
        project_name, [representation]
    )
    return parents_by_repre_id[repre_id]


def get_representations_parents(project_name, representations):
    repre_ids = {
        repre["_id"]
        for repre in representations
    }
    con = get_ayon_server_api_connection()
    parents_by_repre_id = con.get_representations_parents(project_name,
                                                          repre_ids)
    folder_ids = set()
    for parents in parents_by_repre_id.values():
        folder_ids.add(parents[2]["id"])

    tasks_by_folder_id = {}

    new_parents = {}
    for repre_id, parents in parents_by_repre_id.items():
        version, subset, folder, project = parents
        folder_tasks = tasks_by_folder_id.get(folder["id"]) or {}
        folder["tasks"] = folder_tasks
        new_parents[repre_id] = (
            convert_v4_version_to_v3(version),
            convert_v4_subset_to_v3(subset),
            convert_v4_folder_to_v3(folder, project_name),
            project
        )
    return new_parents


def get_archived_representations(
    project_name,
    representation_ids=None,
    representation_names=None,
    version_ids=None,
    context_filters=None,
    names_by_version_ids=None,
    fields=None
):
    return get_representations(
        project_name,
        representation_ids=representation_ids,
        representation_names=representation_names,
        version_ids=version_ids,
        context_filters=context_filters,
        names_by_version_ids=names_by_version_ids,
        archived=True,
        standard=False,
        fields=fields
    )


def get_thumbnail(
    project_name, thumbnail_id, entity_type, entity_id, fields=None
):
    """Receive thumbnail entity data.

    Args:
        project_name (str): Name of project where to look for queried
            entities.
        thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
        entity_type (str): Type of entity for which the thumbnail should be
            received.
        entity_id (str): Id of entity for which the thumbnail should be
            received.
        fields (Iterable[str]): Fields that should be returned. All fields
            are returned if 'None' is passed.

    Returns:
        None: If thumbnail with specified id was not found.
        Dict: Thumbnail entity data which can be reduced to specified
            'fields'.
    """

    if not thumbnail_id or not entity_type or not entity_id:
        return None

    if entity_type == "asset":
        entity_type = "folder"

    elif entity_type == "hero_version":
        entity_type = "version"

    return {
        "_id": thumbnail_id,
        "type": "thumbnail",
        "schema": CURRENT_THUMBNAIL_SCHEMA,
        "data": {
            "entity_type": entity_type,
            "entity_id": entity_id
        }
    }


def get_thumbnails(project_name, thumbnail_contexts, fields=None):
    """Get thumbnail entities.

    Warning:
        This function is not OpenPype compatible. There is no usage of this
        function in the codebase, so there is nothing to convert. The
        previous implementation cannot be AYON compatible without entity
        types.
    """

    thumbnail_items = set()
    for thumbnail_context in thumbnail_contexts:
        thumbnail_id, entity_type, entity_id = thumbnail_context
        thumbnail_item = get_thumbnail(
            project_name, thumbnail_id, entity_type, entity_id
        )
        if thumbnail_item:
            thumbnail_items.add(thumbnail_item)
    return list(thumbnail_items)


def get_thumbnail_id_from_source(project_name, src_type, src_id):
    """Receive thumbnail id from source entity.

    Args:
        project_name (str): Name of project where to look for queried
            entities.
        src_type (str): Type of source entity ('asset', 'version').
        src_id (Union[str, ObjectId]): Id of source entity.

    Returns:
        ObjectId: Thumbnail id assigned to entity.
        None: If source entity does not have any thumbnail id assigned.
    """

    if not src_type or not src_id:
        return None

    if src_type == "version":
        version = get_version_by_id(
            project_name, src_id, fields=["data.thumbnail_id"]
        ) or {}
        return version.get("data", {}).get("thumbnail_id")

    if src_type == "asset":
        asset = get_asset_by_id(
            project_name, src_id, fields=["data.thumbnail_id"]
        ) or {}
        return asset.get("data", {}).get("thumbnail_id")

    return None


def get_workfile_info(
    project_name, asset_id, task_name, filename, fields=None
):
    if not asset_id or not task_name or not filename:
        return None

    con = get_ayon_server_api_connection()
    task = con.get_task_by_name(
        project_name, asset_id, task_name, fields=["id", "name", "folderId"]
    )
    if not task:
        return None

    fields = workfile_info_fields_v3_to_v4(fields)

    for workfile_info in con.get_workfiles_info(
        project_name, task_ids=[task["id"]], fields=fields
    ):
        if workfile_info["name"] == filename:
            return convert_v4_workfile_info_to_v3(workfile_info, task)
    return None
@@ -1,157 +0,0 @@
from .utils import get_ayon_server_api_connection
from .entities import get_assets, get_representation_by_id


def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
    """Extract linked asset ids from asset document.

    One of asset document or asset id must be passed.

    Note:
        Asset links now work only from asset to assets.

    Args:
        project_name (str): Project where to look for asset.
        asset_doc (dict): Asset document from DB.
        asset_id (str): Asset id to find its document.

    Returns:
        List[Union[ObjectId, str]]: Asset ids of input links.
    """

    output = []
    if not asset_doc and not asset_id:
        return output

    if not asset_id:
        asset_id = asset_doc["_id"]

    con = get_ayon_server_api_connection()
    links = con.get_folder_links(project_name, asset_id, link_direction="in")
    return [
        link["entityId"]
        for link in links
        if link["entityType"] == "folder"
    ]


def get_linked_assets(
    project_name, asset_doc=None, asset_id=None, fields=None
):
    """Return linked assets based on passed asset document.

    One of asset document or asset id must be passed.

    Args:
        project_name (str): Name of project where to look for queried
            entities.
        asset_doc (Dict[str, Any]): Asset document from database.
        asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
            asset document.
        fields (Iterable[str]): Fields that should be returned. All fields
            are returned if 'None' is passed.

    Returns:
        List[Dict[str, Any]]: Asset documents of input links for passed
            asset doc.
    """

    link_ids = get_linked_asset_ids(project_name, asset_doc, asset_id)
    if not link_ids:
        return []
    return list(get_assets(project_name, asset_ids=link_ids, fields=fields))


def get_linked_representation_id(
    project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
):
    """Returns list of linked ids of particular type (if provided).

    One of representation document or representation id must be passed.

    Note:
        Representation links now work only from representation through
        version back to representations.

    Todos:
        Missing depth query. Not sure how it did find more representations
        in depth, probably links to version?

    Args:
        project_name (str): Name of project where to look for links.
        repre_doc (Dict[str, Any]): Representation document.
        repre_id (Union[ObjectId, str]): Representation id.
        link_type (str): Type of link (e.g. 'reference', ...).
        max_depth (int): Limit recursion level. Default: 0

    Returns:
        List[ObjectId]: Linked representation ids.
    """

    if repre_doc:
        repre_id = repre_doc["_id"]

    if not repre_id and not repre_doc:
        return []

    version_id = None
    if repre_doc:
        version_id = repre_doc.get("parent")

    if not version_id:
        repre_doc = get_representation_by_id(
            project_name, repre_id, fields=["parent"]
        )
        if repre_doc:
            version_id = repre_doc["parent"]

    if not version_id:
        return []

    if max_depth is None or max_depth == 0:
        max_depth = 1

    link_types = None
    if link_type:
        link_types = [link_type]

    con = get_ayon_server_api_connection()
    # Store already found version ids to avoid recursion, and also to store
    #   output -> Don't forget to remove 'version_id' at the end!!!
    linked_version_ids = {version_id}
    # Each loop of depth will reset this variable
    versions_to_check = {version_id}
    for _ in range(max_depth):
        if not versions_to_check:
            break

        versions_links = con.get_versions_links(
            project_name,
            versions_to_check,
            link_types=link_types,
            link_direction="out")

        versions_to_check = set()
        for links in versions_links.values():
            for link in links:
                # Care only about version links
                if link["entityType"] != "version":
                    continue
                entity_id = link["entityId"]
                # Skip already found linked version ids
                if entity_id in linked_version_ids:
                    continue
                linked_version_ids.add(entity_id)
                versions_to_check.add(entity_id)

    linked_version_ids.remove(version_id)
    if not linked_version_ids:
        return []
    con = get_ayon_server_api_connection()
    representations = con.get_representations(
        project_name,
        version_ids=linked_version_ids,
        fields=["id"])
    return [
        repre["id"]
        for repre in representations
    ]
@@ -1,39 +0,0 @@
# Client functionality

## Reason
Preparation for the OpenPype v4 server. The goal is to remove direct mongo calls in code, to prepare a little for a different source of data, and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that pymongo-specific code is not used directly.

The current goal is not to make a universal database model which can be easily replaced with any different source of data, but to get as close to that as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long-term changes that can be used even in the current state.
## Queries
Query functions don't use the full potential of mongo queries, such as very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If one is really necessary, a new function can be added, but only if it's reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with the available filters and a possible reduction of returned keys for each entity.
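To make the shape of these query functions concrete, a minimal sketch of typical calls (project name, ids and fields are illustrative; the functions are re-exported from the package `__init__.py`):

    from ayon_core.client import get_assets, get_last_versions

    # Iterate assets of a project, reduced to only the fields we need.
    for asset in get_assets("demo_project", fields=["name", "data.parents"]):
        print(asset["name"])

    # Map subset id -> last version document for a set of subsets.
    last_versions = get_last_versions("demo_project", subset_ids=["<subset-id>"])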
## Changes
Changes are a little more complicated. Mongo has many options for how an update can happen, which had to be reduced; it would also be complicated at this stage to validate values which are created or updated, so automation is almost nonexistent at this point. Changes can be made using the operations available in `~/client/operations.py`. Each operation requires a project name and an entity type, but may also require operation-specific data.
### Create
Create operations expect already prepared document data; for that, functions creating skeletal document structures are available (they do not fill all required data, but except for `_id` all data should be right). Existence of the entity is not validated, so if the same creation operation is sent n times, it will create the entity n times, which can cause issues.
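A minimal sketch of preparing a skeleton document with these helpers (the import path, ids and names are illustrative; note that, as said above, nothing checks whether the entity already exists):

    from ayon_core.client.operations import new_subset_document

    subset_doc = new_subset_document(
        name="modelMain",
        family="model",
        asset_id="<asset-id>",
    )
    # subset_doc now carries a generated "_id"; sending the same create
    # operation twice would create the entity twice.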
### Update
An update operation requires an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify building update dictionaries, helper functions were prepared that do this for you; their names follow the template `prepare_<entity type>_update_data` and they work by comparing the previous document with the new document. If the function for a requested entity type is missing, it is because we didn't need it yet, and it requires implementation.
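The dotted-key flattening described above is a small, self-contained transform. A sketch of the idea (an illustrative helper, not the module's actual `prepare_<entity type>_update_data` implementation, which works by diffing documents):

    def flatten_update_data(changes, _prefix=""):
        # Flatten nested dicts into mongo-style dotted update keys.
        output = {}
        for key, value in changes.items():
            full_key = _prefix + key
            if isinstance(value, dict):
                output.update(flatten_update_data(value, full_key + "."))
            else:
                output[full_key] = value
        return output

    # flatten_update_data({"data": {"fps": 25}}) -> {"data.fps": 25}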
### Delete
A delete operation needs an entity id. The entity will be deleted from mongo.
## What (probably) won't be replaced
Some parts of the code are still using direct mongo calls. In most cases these are very specific calls that are module-specific, or their usage will completely change in the future.
- Mongo calls that are not project specific (outside the `avalon` collection) will be removed or will have to use a different mechanism for how the data are stored. At this moment this relates to OpenPype settings and logs, ftrack server events, and some other data.
- Sync server queries. They're complex and very specific to the sync server module. Their replacement will require specific calls to the OpenPype server in v4, so abstracting them with the wrapper is irrelevant and would complicate production in v3.
- Project managers (ftrack, kitsu, shotgrid, embedded Project Manager, etc.). Project managers create, update or remove assets in v3, but in v4 they will create folders with a different structure. Wrapping creation of assets would not help to prepare for v4 because of the new data structures. The same can be said about the editorial Extract Hierarchy Avalon plugin, which creates the project structure.
- Code parts that are marked as deprecated in v3 or will be deprecated in v4:
  - integrate asset legacy publish plugin - already legacy, kept for safety
  - integrate thumbnail - thumbnails will be stored in a different way in v4
  - input links - links will be stored in a different way and will have a different linking mechanism. In v3, links are limited to the same entity type: "asset <-> asset" or "representation <-> representation".

## Known missing replacements
- change subset group in loader tool
- integrate subset group
- query input links in openpype lib
- create project in openpype lib
- save/create workfile doc in openpype lib
- integrate hero version
@@ -1,159 +0,0 @@
import collections
import json

import six
from ayon_api.graphql import GraphQlQuery, FIELD_VALUE, fields_to_dict

from .constants import DEFAULT_FOLDER_FIELDS


def folders_tasks_graphql_query(fields):
    query = GraphQlQuery("FoldersQuery")
    project_name_var = query.add_variable("projectName", "String!")
    folder_ids_var = query.add_variable("folderIds", "[String!]")
    parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
    folder_paths_var = query.add_variable("folderPaths", "[String!]")
    folder_names_var = query.add_variable("folderNames", "[String!]")
    has_products_var = query.add_variable("folderHasProducts", "Boolean!")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    folders_field = project_field.add_field_with_edges("folders")
    folders_field.set_filter("ids", folder_ids_var)
    folders_field.set_filter("parentIds", parent_folder_ids_var)
    folders_field.set_filter("names", folder_names_var)
    folders_field.set_filter("paths", folder_paths_var)
    folders_field.set_filter("hasProducts", has_products_var)

    fields = set(fields)
    fields.discard("tasks")
    tasks_field = folders_field.add_field_with_edges("tasks")
    tasks_field.add_field("name")
    tasks_field.add_field("taskType")

    nested_fields = fields_to_dict(fields)

    query_queue = collections.deque()
    for key, value in nested_fields.items():
        query_queue.append((key, value, folders_field))

    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue

        for k, v in value.items():
            query_queue.append((k, v, field))
    return query


def get_folders_with_tasks(
    con,
    project_name,
    folder_ids=None,
    folder_paths=None,
    folder_names=None,
    parent_ids=None,
    active=True,
    fields=None
):
    """Query folders with tasks from server.

    This is for v3 compatibility, where tasks were stored on assets. This is
    an inefficient way of querying folders and tasks, so it was added only
    as a compatibility function.

    Todos:
        Folder name won't be a unique identifier, so we should add folder
        path filtering.

    Notes:
        Filter 'active' doesn't have a direct filter in GraphQl.

    Args:
        con (ServerAPI): Connection to server.
        project_name (str): Name of project where folders are.
        folder_ids (Iterable[str]): Folder ids to filter.
        folder_paths (Iterable[str]): Folder paths used for filtering.
        folder_names (Iterable[str]): Folder names used for filtering.
        parent_ids (Iterable[str]): Ids of folder parents. Use 'None'
            if folder is direct child of project.
        active (Union[bool, None]): Filter active/inactive folders. Both
            are returned if set to None.
        fields (Union[Iterable(str), None]): Fields to be queried
            for folder. All possible folder fields are returned if 'None'
            is passed.

    Yields:
        Dict[str, Any]: Queried folder entities.
    """

    if not project_name:
        return

    filters = {
        "projectName": project_name
    }
    if folder_ids is not None:
        folder_ids = set(folder_ids)
        if not folder_ids:
            return
        filters["folderIds"] = list(folder_ids)

    if folder_paths is not None:
        folder_paths = set(folder_paths)
        if not folder_paths:
            return
        filters["folderPaths"] = list(folder_paths)

    if folder_names is not None:
        folder_names = set(folder_names)
        if not folder_names:
            return
        filters["folderNames"] = list(folder_names)

    if parent_ids is not None:
        parent_ids = set(parent_ids)
        if not parent_ids:
            return
        if None in parent_ids:
            # Replace 'None' with '"root"' which is used during GraphQl
            #   query for parent ids filter for folders without folder
            #   parent
            parent_ids.remove(None)
            parent_ids.add("root")

        if project_name in parent_ids:
            # Replace project name with '"root"' which is used during
            #   GraphQl query for parent ids filter for folders without
            #   folder parent
            parent_ids.remove(project_name)
            parent_ids.add("root")

        filters["parentFolderIds"] = list(parent_ids)

    if fields:
        fields = set(fields)
    else:
        fields = con.get_default_fields_for_type("folder")
    fields |= DEFAULT_FOLDER_FIELDS

    if active is not None:
        fields.add("active")

    query = folders_tasks_graphql_query(fields)
    for attr, filter_value in filters.items():
        query.set_variable_value(attr, filter_value)

    parsed_data = query.query(con)
    folders = parsed_data["project"]["folders"]
    for folder in folders:
        if active is not None and folder["active"] is not active:
            continue
        folder_data = folder.get("data")
        if isinstance(folder_data, six.string_types):
            folder["data"] = json.loads(folder_data)
        yield folder
@ -1,880 +0,0 @@
|
|||
import copy
|
||||
import json
|
||||
import collections
|
||||
import uuid
|
||||
import datetime
|
||||
|
||||
from ayon_api.server_api import (
|
||||
PROJECT_NAME_ALLOWED_SYMBOLS,
|
||||
PROJECT_NAME_REGEX,
|
||||
)
|
||||
|
||||
from .constants import (
|
||||
CURRENT_PROJECT_SCHEMA,
|
||||
CURRENT_PROJECT_CONFIG_SCHEMA,
|
||||
CURRENT_ASSET_DOC_SCHEMA,
|
||||
CURRENT_SUBSET_SCHEMA,
|
||||
CURRENT_VERSION_SCHEMA,
|
||||
CURRENT_HERO_VERSION_SCHEMA,
|
||||
CURRENT_REPRESENTATION_SCHEMA,
|
||||
CURRENT_WORKFILE_INFO_SCHEMA,
|
||||
CURRENT_THUMBNAIL_SCHEMA,
|
||||
)
|
||||
from .operations_base import (
|
||||
REMOVED_VALUE,
|
||||
CreateOperation,
|
||||
UpdateOperation,
|
||||
DeleteOperation,
|
||||
BaseOperationsSession
|
||||
)
|
||||
from .conversion_utils import (
|
||||
convert_create_asset_to_v4,
|
||||
convert_create_task_to_v4,
|
||||
convert_create_subset_to_v4,
|
||||
convert_create_version_to_v4,
|
||||
convert_create_hero_version_to_v4,
|
||||
convert_create_representation_to_v4,
|
||||
convert_create_workfile_info_to_v4,
|
||||
|
||||
convert_update_folder_to_v4,
|
||||
convert_update_subset_to_v4,
|
||||
convert_update_version_to_v4,
|
||||
convert_update_hero_version_to_v4,
|
||||
convert_update_representation_to_v4,
|
||||
convert_update_workfile_info_to_v4,
|
||||
)
|
||||
from .utils import create_entity_id, get_ayon_server_api_connection
|
||||
|
||||
|
||||
def _create_or_convert_to_id(entity_id=None):
|
||||
if entity_id is None:
|
||||
return create_entity_id()
|
||||
|
||||
# Validate if can be converted to uuid
|
||||
uuid.UUID(entity_id)
|
||||
return entity_id
|
||||
|
||||
|
||||
def new_project_document(
|
||||
project_name, project_code, config, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of project document.
|
||||
|
||||
Args:
|
||||
project_name (str): Name of project. Used as identifier of a project.
|
||||
project_code (str): Shorter version of projet without spaces and
|
||||
special characters (in most of cases). Should be also considered
|
||||
as unique name across projects.
|
||||
config (Dic[str, Any]): Project config consist of roots, templates,
|
||||
applications and other project Anatomy related data.
|
||||
data (Dict[str, Any]): Project data with information about it's
|
||||
attributes (e.g. 'fps' etc.) or integration specific keys.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of project document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
data["code"] = project_code
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"name": project_name,
|
||||
"type": CURRENT_PROJECT_SCHEMA,
|
||||
"entity_data": data,
|
||||
"config": config
|
||||
}
|
||||
|
||||
|
||||
def new_asset_document(
|
||||
name, project_id, parent_id, parents, data=None, entity_id=None
|
||||
):
|
||||
"""Create skeleton data of asset document.
|
||||
|
||||
Args:
|
||||
name (str): Is considered as unique identifier of asset in project.
|
||||
project_id (Union[str, ObjectId]): Id of project doument.
|
||||
parent_id (Union[str, ObjectId]): Id of parent asset.
|
||||
parents (List[str]): List of parent assets names.
|
||||
data (Dict[str, Any]): Asset document data. Empty dictionary is used
|
||||
if not passed. Value of 'parent_id' is used to fill 'visualParent'.
|
||||
entity_id (Union[str, ObjectId]): Predefined id of document. New id is
|
||||
created if not passed.
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Skeleton of asset document.
|
||||
"""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
if parent_id is not None:
|
||||
parent_id = _create_or_convert_to_id(parent_id)
|
||||
data["visualParent"] = parent_id
|
||||
data["parents"] = parents
|
||||
|
||||
return {
|
||||
"_id": _create_or_convert_to_id(entity_id),
|
||||
"type": "asset",
|
||||
"name": name,
|
||||
# This will be ignored
|
||||
"parent": project_id,
|
||||
"data": data,
|
||||
"schema": CURRENT_ASSET_DOC_SCHEMA
|
||||
}
|
||||
|
||||
|
||||
def new_subset_document(name, family, asset_id, data=None, entity_id=None):
    """Create skeleton data of subset document.

    Args:
        name (str): Is considered as unique identifier of subset under asset.
        family (str): Subset's family.
        asset_id (Union[str, ObjectId]): Id of parent asset.
        data (Dict[str, Any]): Subset document data. Empty dictionary is used
            if not passed. Value of the 'family' argument is used to fill
            the 'family' key.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of subset document.
    """

    if data is None:
        data = {}
    data["family"] = family
    return {
        "_id": _create_or_convert_to_id(entity_id),
        "schema": CURRENT_SUBSET_SCHEMA,
        "type": "subset",
        "name": name,
        "data": data,
        "parent": _create_or_convert_to_id(asset_id)
    }


def new_version_doc(version, subset_id, data=None, entity_id=None):
    """Create skeleton data of version document.

    Args:
        version (int): Is considered as unique identifier of version
            under subset.
        subset_id (Union[str, ObjectId]): Id of parent subset.
        data (Dict[str, Any]): Version document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of version document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_id(entity_id),
        "schema": CURRENT_VERSION_SCHEMA,
        "type": "version",
        "name": int(version),
        "parent": _create_or_convert_to_id(subset_id),
        "data": data
    }


def new_hero_version_doc(subset_id, data, version=None, entity_id=None):
    """Create skeleton data of hero version document.

    Args:
        subset_id (Union[str, ObjectId]): Id of parent subset.
        data (Dict[str, Any]): Version document data.
        version (int): Version of the source version. Stored as a negative
            number.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of hero version document.
    """

    if version is None:
        version = -1
    elif version > 0:
        version = -version

    return {
        "_id": _create_or_convert_to_id(entity_id),
        "schema": CURRENT_HERO_VERSION_SCHEMA,
        "type": "hero_version",
        "version": version,
        "parent": _create_or_convert_to_id(subset_id),
        "data": data
    }


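# A small sketch of the sign convention above: hero versions store a
# negative 'version' number pointing at the source version, and fall back
# to -1 when no source version is given (ids are illustrative).
#
#     doc = new_hero_version_doc("some_subset_id", {}, version=3)
#     assert doc["version"] == -3
#     doc = new_hero_version_doc("some_subset_id", {})
#     assert doc["version"] == -1

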
def new_representation_doc(
    name, version_id, context, data=None, entity_id=None
):
    """Create skeleton data of representation document.

    Args:
        name (str): Representation name considered as unique identifier
            of representation under version.
        version_id (Union[str, ObjectId]): Id of parent version.
        context (Dict[str, Any]): Representation context used to fill
            templates or to query the representation.
        data (Dict[str, Any]): Representation document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of representation document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_id(entity_id),
        "schema": CURRENT_REPRESENTATION_SCHEMA,
        "type": "representation",
        "parent": _create_or_convert_to_id(version_id),
        "name": name,
        "data": data,

        # Imprint shortcut to context for performance reasons.
        "context": context
    }


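# Illustrative sketch of the imprinted context (the key set and values are
# assumptions, not taken from this file): the 'context' shortcut lets
# consumers fill path templates without re-querying parent documents.
#
#     repre_doc = new_representation_doc(
#         "exr",
#         "some_version_id",
#         context={
#             "project": {"name": "demo", "code": "dm"},
#             "asset": "sh010",
#             "subset": "renderMain",
#             "version": 3,
#             "representation": "exr",
#         },
#     )

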
def new_thumbnail_doc(data=None, entity_id=None):
    """Create skeleton data of thumbnail document.

    Args:
        data (Dict[str, Any]): Thumbnail document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of thumbnail document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_id(entity_id),
        "type": "thumbnail",
        "schema": CURRENT_THUMBNAIL_SCHEMA,
        "data": data
    }


def new_workfile_info_doc(
    filename, asset_id, task_name, files, data=None, entity_id=None
):
    """Create skeleton data of workfile info document.

    Workfile document is at this moment used primarily for artist notes.

    Args:
        filename (str): Filename of workfile.
        asset_id (Union[str, ObjectId]): Id of asset under which the
            workfile lives.
        task_name (str): Task under which the workfile was created.
        files (List[str]): List of rootless filepaths related to workfile.
        data (Dict[str, Any]): Additional metadata.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id
            is created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of workfile info document.
    """

    if not data:
        data = {}

    return {
        "_id": _create_or_convert_to_id(entity_id),
        "type": "workfile",
        "parent": _create_or_convert_to_id(asset_id),
        "task_name": task_name,
        "filename": filename,
        "data": data,
        "files": files
    }


def _prepare_update_data(old_doc, new_doc, replace):
    changes = {}
    for key, value in new_doc.items():
        if key not in old_doc or value != old_doc[key]:
            changes[key] = value

    if replace:
        for key in old_doc.keys():
            if key not in new_doc:
                changes[key] = REMOVED_VALUE
    return changes


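# Sketch of the diffing above (documents are made up): keys present only in
# the old document are marked with the 'REMOVED_VALUE' sentinel when
# 'replace' is enabled, otherwise they are left untouched.
#
#     old = {"name": "shot010", "fps": 24, "note": "wip"}
#     new = {"name": "shot010", "fps": 25}
#     _prepare_update_data(old, new, replace=False)
#     # -> {"fps": 25}
#     _prepare_update_data(old, new, replace=True)
#     # -> {"fps": 25, "note": REMOVED_VALUE}

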
def prepare_subset_update_data(old_doc, new_doc, replace=True):
    """Compare two subset documents and prepare update data.

    Based on the compared values creates update data for
    'MongoUpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_version_update_data(old_doc, new_doc, replace=True):
    """Compare two version documents and prepare update data.

    Based on the compared values creates update data for
    'MongoUpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
    """Compare two hero version documents and prepare update data.

    Based on the compared values creates update data for 'UpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    changes = _prepare_update_data(old_doc, new_doc, replace)
    changes.pop("version_id", None)
    return changes


def prepare_representation_update_data(old_doc, new_doc, replace=True):
    """Compare two representation documents and prepare update data.

    Based on the compared values creates update data for
    'MongoUpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    changes = _prepare_update_data(old_doc, new_doc, replace)
    context = changes.get("data", {}).get("context")
    # Make sure that both 'family' and 'subset' are in changes if
    # one of them changed (they'll both become 'product').
    if (
        context
        and ("family" in context or "subset" in context)
    ):
        context["family"] = new_doc["data"]["context"]["family"]
        context["subset"] = new_doc["data"]["context"]["subset"]

    return changes


def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
    """Compare two workfile info documents and prepare update data.

    Based on the compared values creates update data for
    'MongoUpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


class FailedOperations(Exception):
    pass


def entity_data_json_default(value):
    if isinstance(value, datetime.datetime):
        return int(value.timestamp())

    raise TypeError(
        "Object of type {} is not JSON serializable".format(str(type(value)))
    )


def failed_json_default(value):
    return "< Failed value {} > {}".format(type(value), str(value))


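# A short sketch of the two JSON defaults above (payload is invented):
# datetimes serialize to integer timestamps, and 'failed_json_default'
# stringifies anything else so a failing body can still be logged.
#
#     payload = {"createdAt": datetime.datetime(2024, 1, 1)}
#     json.dumps(payload, default=entity_data_json_default)
#     # -> '{"createdAt": 1704067200}' (exact value depends on timezone)

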
class ServerCreateOperation(CreateOperation):
    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
    """

    def __init__(self, project_name, entity_type, data, session):
        self._session = session

        if not data:
            data = {}
        data = copy.deepcopy(data)
        if entity_type == "project":
            raise ValueError("Project cannot be created using operations")

        tasks = None
        if entity_type == "asset":
            # TODO handle tasks
            entity_type = "folder"
            if "data" in data:
                tasks = data["data"].get("tasks")

            project = self._session.get_project(project_name)
            new_data = convert_create_asset_to_v4(data, project, self.con)

        elif entity_type == "task":
            project = self._session.get_project(project_name)
            new_data = convert_create_task_to_v4(data, project, self.con)

        elif entity_type == "subset":
            new_data = convert_create_subset_to_v4(data, self.con)
            entity_type = "product"

        elif entity_type == "version":
            new_data = convert_create_version_to_v4(data, self.con)

        elif entity_type == "hero_version":
            new_data = convert_create_hero_version_to_v4(
                data, project_name, self.con
            )
            entity_type = "version"

        elif entity_type in ("representation", "archived_representation"):
            new_data = convert_create_representation_to_v4(data, self.con)
            entity_type = "representation"

        elif entity_type == "workfile":
            new_data = convert_create_workfile_info_to_v4(
                data, project_name, self.con
            )

        else:
            raise ValueError(
                "Unhandled entity type \"{}\"".format(entity_type)
            )

        # Simple check if data can be dumped into json
        # - should raise error on 'ObjectId' object
        try:
            new_data = json.loads(
                json.dumps(new_data, default=entity_data_json_default)
            )

        except Exception:
            raise ValueError("Couldn't json parse body: {}".format(
                json.dumps(new_data, default=failed_json_default)
            ))

        super(ServerCreateOperation, self).__init__(
            project_name, entity_type, new_data
        )

        if "id" not in self._data:
            self._data["id"] = create_entity_id()

        if tasks:
            copied_tasks = copy.deepcopy(tasks)
            for task_name, task in copied_tasks.items():
                task["name"] = task_name
                task["folderId"] = self._data["id"]
                self.session.create_entity(
                    project_name, "task", task, nested_id=self.id
                )

    @property
    def con(self):
        return self.session.con

    @property
    def session(self):
        return self._session

    @property
    def entity_id(self):
        return self._data["id"]

    def to_server_operation(self):
        return {
            "id": self.id,
            "type": "create",
            "entityType": self.entity_type,
            "entityId": self.entity_id,
            "data": self._data
        }


class ServerUpdateOperation(UpdateOperation):
    """Operation to update an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Identifier of an entity.
        update_data (Dict[str, Any]): Key -> value changes that will be set
            in database. If value is set to 'REMOVED_VALUE' the key will be
            removed. Only first level of dictionary is checked (on purpose).
    """

    def __init__(
        self, project_name, entity_type, entity_id, update_data, session
    ):
        self._session = session

        update_data = copy.deepcopy(update_data)
        if entity_type == "project":
            raise ValueError("Project cannot be updated using operations")

        if entity_type in ("asset", "archived_asset"):
            new_update_data = convert_update_folder_to_v4(
                project_name, entity_id, update_data, self.con
            )
            entity_type = "folder"

        elif entity_type == "subset":
            new_update_data = convert_update_subset_to_v4(
                project_name, entity_id, update_data, self.con
            )
            entity_type = "product"

        elif entity_type == "version":
            new_update_data = convert_update_version_to_v4(
                project_name, entity_id, update_data, self.con
            )

        elif entity_type == "hero_version":
            new_update_data = convert_update_hero_version_to_v4(
                project_name, entity_id, update_data, self.con
            )
            entity_type = "version"

        elif entity_type in ("representation", "archived_representation"):
            new_update_data = convert_update_representation_to_v4(
                project_name, entity_id, update_data, self.con
            )
            entity_type = "representation"

        elif entity_type == "workfile":
            new_update_data = convert_update_workfile_info_to_v4(
                project_name, entity_id, update_data, self.con
            )

        else:
            raise ValueError(
                "Unhandled entity type \"{}\"".format(entity_type)
            )

        try:
            new_update_data = json.loads(
                json.dumps(new_update_data, default=entity_data_json_default)
            )

        except Exception:
            raise ValueError("Couldn't json parse body: {}".format(
                json.dumps(new_update_data, default=failed_json_default)
            ))

        super(ServerUpdateOperation, self).__init__(
            project_name, entity_type, entity_id, new_update_data
        )

    @property
    def con(self):
        return self.session.con

    @property
    def session(self):
        return self._session

    def to_server_operation(self):
        if not self._update_data:
            return None

        update_data = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                value = None
            update_data[key] = value

        return {
            "id": self.id,
            "type": "update",
            "entityType": self.entity_type,
            "entityId": self.entity_id,
            "data": update_data
        }


class ServerDeleteOperation(DeleteOperation):
    """Operation to delete an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Entity id that will be removed.
    """

    def __init__(self, project_name, entity_type, entity_id, session):
        self._session = session

        if entity_type == "asset":
            entity_type = "folder"

        elif entity_type == "hero_version":
            entity_type = "version"

        elif entity_type == "subset":
            entity_type = "product"

        super(ServerDeleteOperation, self).__init__(
            project_name, entity_type, entity_id
        )

    @property
    def con(self):
        return self.session.con

    @property
    def session(self):
        return self._session

    def to_server_operation(self):
        return {
            "id": self.id,
            "type": self.operation_name,
            "entityId": self.entity_id,
            "entityType": self.entity_type,
        }


class OperationsSession(BaseOperationsSession):
    def __init__(self, con=None, *args, **kwargs):
        super(OperationsSession, self).__init__(*args, **kwargs)
        if con is None:
            con = get_ayon_server_api_connection()
        self._con = con
        self._project_cache = {}
        self._nested_operations = collections.defaultdict(list)

    @property
    def con(self):
        return self._con

    def get_project(self, project_name):
        if project_name not in self._project_cache:
            self._project_cache[project_name] = self.con.get_project(
                project_name)
        return copy.deepcopy(self._project_cache[project_name])

    def commit(self):
        """Commit session operations."""

        operations, self._operations = self._operations, []
        if not operations:
            return

        operations_by_project = collections.defaultdict(list)
        for operation in operations:
            operations_by_project[operation.project_name].append(operation)

        body_by_id = {}
        results = []
        for project_name, operations in operations_by_project.items():
            operations_body = []
            for operation in operations:
                body = operation.to_server_operation()
                if body is not None:
                    try:
                        json.dumps(body)
                    except Exception:
                        raise ValueError(
                            "Couldn't json parse body: {}".format(
                                json.dumps(
                                    body, indent=4,
                                    default=failed_json_default
                                )
                            )
                        )

                    body_by_id[operation.id] = body
                    operations_body.append(body)

            if operations_body:
                result = self._con.post(
                    "projects/{}/operations".format(project_name),
                    operations=operations_body,
                    canFail=False
                )
                results.append(result.data)

        for result in results:
            if result.get("success"):
                continue

            if "operations" not in result:
                raise FailedOperations(
                    "Operation failed. Content: {}".format(str(result))
                )

            for op_result in result["operations"]:
                if not op_result["success"]:
                    operation_id = op_result["id"]
                    raise FailedOperations((
                        "Operation \"{}\" failed with data:\n{}\nError: {}."
                    ).format(
                        operation_id,
                        json.dumps(body_by_id[operation_id], indent=4),
                        op_result.get("error", "unknown"),
                    ))

    def create_entity(self, project_name, entity_type, data, nested_id=None):
        """Fast access to 'ServerCreateOperation'.

        Args:
            project_name (str): On which project the creation happens.
            entity_type (str): Which entity type will be created.
            data (Dict[str, Any]): Entity data.
            nested_id (str): Id of the operation that triggered this
                operation -> operations can trigger suboperations, but they
                must be added to the operations list after their parent is
                added.

        Returns:
            ServerCreateOperation: Object of create operation.
        """

        operation = ServerCreateOperation(
            project_name, entity_type, data, self
        )

        if nested_id:
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))

        return operation

    def update_entity(
        self, project_name, entity_type, entity_id, update_data, nested_id=None
    ):
        """Fast access to 'ServerUpdateOperation'.

        Returns:
            ServerUpdateOperation: Object of update operation.
        """

        operation = ServerUpdateOperation(
            project_name, entity_type, entity_id, update_data, self
        )
        if nested_id:
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))
        return operation

    def delete_entity(
        self, project_name, entity_type, entity_id, nested_id=None
    ):
        """Fast access to 'ServerDeleteOperation'.

        Returns:
            ServerDeleteOperation: Object of delete operation.
        """

        operation = ServerDeleteOperation(
            project_name, entity_type, entity_id, self
        )
        if nested_id:
            self._nested_operations[nested_id].append(operation)
        else:
            self.add(operation)
            if operation.id in self._nested_operations:
                self.extend(self._nested_operations.pop(operation.id))
        return operation


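# A hedged usage sketch (project name and ids are placeholders): operations
# are queued on the session and sent to the server grouped per project when
# 'commit' is called. Entity types still use the v3 vocabulary ('asset',
# 'subset') and are converted to v4 ('folder', 'product') internally.
#
#     session = OperationsSession()
#     session.create_entity(
#         "my_project", "asset", {"name": "sh010", "data": {}}
#     )
#     session.update_entity(
#         "my_project", "version", "some_version_id", {"data": {"fps": 25}}
#     )
#     session.delete_entity("my_project", "representation", "some_repre_id")
#     session.commit()

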
def create_project(
    project_name,
    project_code,
    library_project=False,
    preset_name=None,
    con=None
):
    """Create project using OpenPype settings.

    This project creation function is not validating project document on
    creation. It is because project document is created blindly with only
    the minimum required information about project, which is its name, code,
    type and schema.

    Entered project name must be unique and project must not exist yet.

    Note:
        This function is here to be OP v4 ready but in v3 has more logic
        to do. That's why inner imports are in the body.

    Args:
        project_name (str): New project name. Should be unique.
        project_code (str): Project's code should be unique too.
        library_project (bool): Project is library project.
        preset_name (str): Name of anatomy preset. Default is used if not
            passed.
        con (ServerAPI): Connection to server with logged user.

    Raises:
        ValueError: When project name already exists.

    Returns:
        dict: Created project document.
    """

    if con is None:
        con = get_ayon_server_api_connection()

    return con.create_project(
        project_name,
        project_code,
        library_project,
        preset_name
    )


def delete_project(project_name, con=None):
    if con is None:
        con = get_ayon_server_api_connection()

    return con.delete_project(project_name)


def create_thumbnail(project_name, src_filepath, thumbnail_id=None, con=None):
    if con is None:
        con = get_ayon_server_api_connection()
    return con.create_thumbnail(project_name, src_filepath, thumbnail_id)


@@ -1,289 +0,0 @@
import uuid
import copy
from abc import ABCMeta, abstractmethod, abstractproperty

import six

REMOVED_VALUE = object()


@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
    """Base operation class.

    Operation represents a call into database. The call can create, change
    or remove data.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
    """

    def __init__(self, project_name, entity_type):
        self._project_name = project_name
        self._entity_type = entity_type
        self._id = str(uuid.uuid4())

    @property
    def project_name(self):
        return self._project_name

    @property
    def id(self):
        """Identifier of operation."""

        return self._id

    @property
    def entity_type(self):
        return self._entity_type

    @abstractproperty
    def operation_name(self):
        """Stringified type of operation."""

        pass

    def to_data(self):
        """Convert operation to data that can be converted to json or others.

        Warning:
            Current state returns ObjectId objects which cannot be parsed by
            json.

        Returns:
            Dict[str, Any]: Description of operation.
        """

        return {
            "id": self._id,
            "entity_type": self.entity_type,
            "project_name": self.project_name,
            "operation": self.operation_name
        }


class CreateOperation(AbstractOperation):
    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
    """

    operation_name = "create"

    def __init__(self, project_name, entity_type, data):
        super(CreateOperation, self).__init__(project_name, entity_type)

        if not data:
            data = {}
        else:
            data = copy.deepcopy(dict(data))
        self._data = data

    def __setitem__(self, key, value):
        self.set_value(key, value)

    def __getitem__(self, key):
        return self.data[key]

    def set_value(self, key, value):
        self.data[key] = value

    def get(self, key, *args, **kwargs):
        return self.data.get(key, *args, **kwargs)

    @abstractproperty
    def entity_id(self):
        pass

    @property
    def data(self):
        return self._data

    def to_data(self):
        output = super(CreateOperation, self).to_data()
        output["data"] = copy.deepcopy(self.data)
        return output


class UpdateOperation(AbstractOperation):
    """Operation to update an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Identifier of an entity.
        update_data (Dict[str, Any]): Key -> value changes that will be set
            in database. If value is set to 'REMOVED_VALUE' the key will be
            removed. Only first level of dictionary is checked (on purpose).
    """

    operation_name = "update"

    def __init__(self, project_name, entity_type, entity_id, update_data):
        super(UpdateOperation, self).__init__(project_name, entity_type)

        self._entity_id = entity_id
        self._update_data = update_data

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def update_data(self):
        return self._update_data

    def to_data(self):
        changes = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                value = None
            changes[key] = value

        output = super(UpdateOperation, self).to_data()
        output.update({
            "entity_id": self.entity_id,
            "changes": changes
        })
        return output


class DeleteOperation(AbstractOperation):
    """Operation to delete an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Entity id that will be removed.
    """

    operation_name = "delete"

    def __init__(self, project_name, entity_type, entity_id):
        super(DeleteOperation, self).__init__(project_name, entity_type)

        self._entity_id = entity_id

    @property
    def entity_id(self):
        return self._entity_id

    def to_data(self):
        output = super(DeleteOperation, self).to_data()
        output["entity_id"] = self.entity_id
        return output


class BaseOperationsSession(object):
    """Session storing operations that should happen in an order.

    At this moment it does not handle anything special and can be considered
    a plain list of operations that will happen one after another. If
    creation of the same entity is present multiple times it is not handled
    in any way, and document values are not validated.
    """

    def __init__(self):
        self._operations = []

    def __len__(self):
        return len(self._operations)

    def add(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """
        if not isinstance(
            operation,
            (CreateOperation, UpdateOperation, DeleteOperation)
        ):
            raise TypeError("Expected Operation object got {}".format(
                str(type(operation))
            ))

        self._operations.append(operation)

    def append(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """

        self.add(operation)

    def extend(self, operations):
        """Add operations to be processed.

        Args:
            operations (List[BaseOperation]): Operations that should be
                processed.
        """

        for operation in operations:
            self.add(operation)

    def remove(self, operation):
        """Remove operation."""

        self._operations.remove(operation)

    def clear(self):
        """Clear all registered operations."""

        self._operations = []

    def to_data(self):
        return [
            operation.to_data()
            for operation in self._operations
        ]

    @abstractmethod
    def commit(self):
        """Commit session operations."""
        pass

    def create_entity(self, project_name, entity_type, data):
        """Fast access to 'CreateOperation'.

        Returns:
            CreateOperation: Object of create operation.
        """

        operation = CreateOperation(project_name, entity_type, data)
        self.add(operation)
        return operation

    def update_entity(self, project_name, entity_type, entity_id, update_data):
        """Fast access to 'UpdateOperation'.

        Returns:
            UpdateOperation: Object of update operation.
        """

        operation = UpdateOperation(
            project_name, entity_type, entity_id, update_data
        )
        self.add(operation)
        return operation

    def delete_entity(self, project_name, entity_type, entity_id):
        """Fast access to 'DeleteOperation'.

        Returns:
            DeleteOperation: Object of delete operation.
        """

        operation = DeleteOperation(project_name, entity_type, entity_id)
        self.add(operation)
        return operation


@@ -1,134 +0,0 @@
import os
import uuid

import ayon_api

from ayon_core.client.operations_base import REMOVED_VALUE


class _GlobalCache:
    initialized = False


def get_ayon_server_api_connection():
    if _GlobalCache.initialized:
        con = ayon_api.get_server_api_connection()
    else:
        from ayon_core.lib.local_settings import get_local_site_id

        _GlobalCache.initialized = True
        site_id = get_local_site_id()
        version = os.getenv("AYON_VERSION")
        if ayon_api.is_connection_created():
            con = ayon_api.get_server_api_connection()
            con.set_site_id(site_id)
            con.set_client_version(version)
        else:
            con = ayon_api.create_connection(site_id, version)
    return con


def create_entity_id():
    return uuid.uuid1().hex


def prepare_attribute_changes(old_entity, new_entity, replace=False):
    """Prepare changes of attributes on entities.

    Compare 'attrib' of old and new entity data to prepare only changed
    values that should be sent to server for update.

    Example:
        >>> # Limited entity data to 'attrib'
        >>> old_entity = {
        ...     "attrib": {"attr_1": 1, "attr_2": "MyString", "attr_3": True}
        ... }
        >>> new_entity = {
        ...     "attrib": {"attr_1": 2, "attr_3": True, "attr_4": 3}
        ... }
        >>> # Changes if replacement should not happen
        >>> expected_changes = {
        ...     "attr_1": 2,
        ...     "attr_4": 3
        ... }
        >>> changes = prepare_attribute_changes(old_entity, new_entity)
        >>> changes == expected_changes
        True

        >>> # Changes if replacement should happen
        >>> expected_changes_replace = {
        ...     "attr_1": 2,
        ...     "attr_2": REMOVED_VALUE,
        ...     "attr_4": 3
        ... }
        >>> changes_replace = prepare_attribute_changes(
        ...     old_entity, new_entity, True)
        >>> changes_replace == expected_changes_replace
        True

    Args:
        old_entity (dict[str, Any]): Data of entity queried from server.
        new_entity (dict[str, Any]): Entity data with applied changes.
        replace (bool): New entity should fully replace all old entity
            values.

    Returns:
        Dict[str, Any]: Values from new entity only if value has changed.
    """

    attrib_changes = {}
    new_attrib = new_entity.get("attrib")
    old_attrib = old_entity.get("attrib")
    if new_attrib is None:
        if not replace:
            return attrib_changes
        new_attrib = {}

    if old_attrib is None:
        return new_attrib

    for attr, new_attr_value in new_attrib.items():
        old_attr_value = old_attrib.get(attr)
        if old_attr_value != new_attr_value:
            attrib_changes[attr] = new_attr_value

    if replace:
        for attr in old_attrib:
            if attr not in new_attrib:
                attrib_changes[attr] = REMOVED_VALUE

    return attrib_changes


def prepare_entity_changes(old_entity, new_entity, replace=False):
    """Prepare changes of AYON entities.

    Compare old and new entity to filter values from new data that changed.

    Args:
        old_entity (dict[str, Any]): Data of entity queried from server.
        new_entity (dict[str, Any]): Entity data with applied changes.
        replace (bool): All attributes should be replaced by new values. So
            all attribute values that are not on new entity will be removed.

    Returns:
        Dict[str, Any]: Only values from new entity that changed.
    """

    changes = {}
    for key, new_value in new_entity.items():
        if key == "attrib":
            continue

        old_value = old_entity.get(key)
        if old_value != new_value:
            changes[key] = new_value

    if replace:
        for key in old_entity:
            if key not in new_entity:
                changes[key] = REMOVED_VALUE

    attr_changes = prepare_attribute_changes(old_entity, new_entity, replace)
    if attr_changes:
        changes["attrib"] = attr_changes
    return changes


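# Illustrative sketch (entities trimmed to the relevant keys): top-level
# keys are compared directly, while 'attrib' is delegated to
# 'prepare_attribute_changes' and nested under 'attrib' only when something
# actually changed.
#
#     old = {"label": "Shot 10", "attrib": {"fps": 24}}
#     new = {"label": "Shot 010", "attrib": {"fps": 25}}
#     prepare_entity_changes(old, new)
#     # -> {"label": "Shot 010", "attrib": {"fps": 25}}

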
@@ -27,7 +27,8 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
        "tvpaint",
        "substancepainter",
        "aftereffects",
        "wrap"
        "wrap",
        "openrv"
    }
    launch_types = {LaunchTypes.local}


@@ -54,21 +54,22 @@ class CopyTemplateWorkfile(PreLaunchHook):
            self.log.info("Last workfile does not exist.")

        project_name = self.data["project_name"]
        asset_name = self.data["asset_name"]
        folder_path = self.data["folder_path"]
        task_name = self.data["task_name"]
        host_name = self.application.host_name

        project_settings = get_project_settings(project_name)

        project_doc = self.data.get("project_doc")
        asset_doc = self.data.get("asset_doc")
        project_entity = self.data.get("project_entity")
        folder_entity = self.data.get("folder_entity")
        task_entity = self.data.get("task_entity")
        anatomy = self.data.get("anatomy")
        if project_doc and asset_doc:
        if project_entity and folder_entity and task_entity:
            self.log.debug("Started filtering of custom template paths.")
            template_path = get_custom_workfile_template(
                project_doc,
                asset_doc,
                task_name,
                project_entity,
                folder_entity,
                task_entity,
                host_name,
                anatomy,
                project_settings


@@ -81,7 +82,7 @@ class CopyTemplateWorkfile(PreLaunchHook):
            ))
            template_path = get_custom_workfile_template_by_string_context(
                project_name,
                asset_name,
                folder_path,
                task_name,
                host_name,
                anatomy,


@@ -21,7 +21,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
            return

        env = self.data.get("env") or {}
        workdir = env.get("AVALON_WORKDIR")
        workdir = env.get("AYON_WORKDIR")
        if not workdir or not os.path.exists(workdir):
            return


@@ -1,4 +1,5 @@
from ayon_core.client import get_project, get_asset_by_name
from ayon_api import get_project, get_folder_by_path, get_task_by_name

from ayon_core.lib.applications import (
    PreLaunchHook,
    EnvironmentPrepData,
@@ -16,19 +17,20 @@ class GlobalHostDataHook(PreLaunchHook):
        """Prepare global objects to `data` that will be used for sure."""
        self.prepare_global_data()

        if not self.data.get("asset_doc"):
        if not self.data.get("folder_entity"):
            return

        app = self.launch_context.application
        temp_data = EnvironmentPrepData({
            "project_name": self.data["project_name"],
            "asset_name": self.data["asset_name"],
            "folder_path": self.data["folder_path"],
            "task_name": self.data["task_name"],

            "app": app,

            "project_doc": self.data["project_doc"],
            "asset_doc": self.data["asset_doc"],
            "project_entity": self.data["project_entity"],
            "folder_entity": self.data["folder_entity"],
            "task_entity": self.data["task_entity"],

            "anatomy": self.data["anatomy"],


@@ -59,19 +61,37 @@ class GlobalHostDataHook(PreLaunchHook):
            return

        self.log.debug("Project name is set to \"{}\"".format(project_name))

        # Project Entity
        project_entity = get_project(project_name)
        self.data["project_entity"] = project_entity

        # Anatomy
        self.data["anatomy"] = Anatomy(project_name)
        self.data["anatomy"] = Anatomy(
            project_name, project_entity=project_entity
        )

        # Project document
        project_doc = get_project(project_name)
        self.data["project_doc"] = project_doc

        asset_name = self.data.get("asset_name")
        if not asset_name:
        folder_path = self.data.get("folder_path")
        if not folder_path:
            self.log.warning(
                "Asset name was not set. Skipping asset document query."
                "Folder path is not set. Skipping folder query."
            )
            return

        asset_doc = get_asset_by_name(project_name, asset_name)
        self.data["asset_doc"] = asset_doc
        folder_entity = get_folder_by_path(project_name, folder_path)
        self.data["folder_entity"] = folder_entity

        task_name = self.data.get("task_name")
        if not task_name:
            self.log.warning(
                "Task name is not set. Skipping task query."
            )
            return

        if not folder_entity:
            return

        task_entity = get_task_by_name(
            project_name, folder_entity["id"], task_name
        )
        self.data["task_entity"] = task_entity
@@ -19,6 +19,7 @@ class OCIOEnvHook(PreLaunchHook):
        "nuke",
        "hiero",
        "resolve",
        "openrv"
    }
    launch_types = set()


@@ -27,10 +28,10 @@ class OCIOEnvHook(PreLaunchHook):

        template_data = get_template_data_with_names(
            project_name=self.data["project_name"],
            asset_name=self.data["asset_name"],
            folder_path=self.data["folder_path"],
            task_name=self.data["task_name"],
            host_name=self.host_name,
            system_settings=self.data["system_settings"]
            settings=self.data["project_settings"]
        )

        config_data = get_imageio_config(


@@ -181,6 +181,10 @@ class HostDirmap(object):
                exclude_locals=False,
                cached=False)

            # TODO implement
            # Dirmap is dependent on 'get_site_local_overrides' which
            # is not implemented in AYON. The mapping should be received
            # from sitesync addon.
            active_overrides = get_site_local_overrides(
                project_name, active_site)
            remote_overrides = get_site_local_overrides(
@@ -18,7 +18,7 @@ class HostBase(object):
    Compared to 'avalon' concept:
    What was before considered as functions in host implementation folder. The
    host implementation should primarily care about adding ability of creation
    (mark subsets to be published) and optionally about referencing published
    (mark products to be published) and optionally about referencing published
    representations as containers.

    Host may need extend some functionality like working with workfiles


@@ -106,15 +106,15 @@ class HostBase(object):
            Union[str, None]: Current project name.
        """

        return os.environ.get("AVALON_PROJECT")
        return os.environ.get("AYON_PROJECT_NAME")

    def get_current_asset_name(self):
    def get_current_folder_path(self):
        """
        Returns:
            Union[str, None]: Current asset name.
        """

        return os.environ.get("AVALON_ASSET")
        return os.environ.get("AYON_FOLDER_PATH")

    def get_current_task_name(self):
        """


@@ -122,7 +122,7 @@ class HostBase(object):
            Union[str, None]: Current task name.
        """

        return os.environ.get("AVALON_TASK")
        return os.environ.get("AYON_TASK_NAME")

    def get_current_context(self):
        """Get current context information.


@@ -134,12 +134,12 @@ class HostBase(object):

        Returns:
            Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
                'asset_name' and 'task_name'. All of them can be 'None'.
                'folder_path' and 'task_name'. All of them can be 'None'.
        """

        return {
            "project_name": self.get_current_project_name(),
            "asset_name": self.get_current_asset_name(),
            "folder_path": self.get_current_folder_path(),
            "task_name": self.get_current_task_name()
        }


@@ -161,13 +161,13 @@ class HostBase(object):
        # Use current context to fill the context title
        current_context = self.get_current_context()
        project_name = current_context["project_name"]
        asset_name = current_context["asset_name"]
        folder_path = current_context["folder_path"]
        task_name = current_context["task_name"]
        items = []
        if project_name:
            items.append(project_name)
        if asset_name:
            items.append(asset_name.lstrip("/"))
        if folder_path:
            items.append(folder_path.lstrip("/"))
        if task_name:
            items.append(task_name)
        if items:


@@ -234,7 +234,7 @@ class IWorkfileHost:
            str: Path to new workdir.
        """

        return session["AVALON_WORKDIR"]
        return session["AYON_WORKDIR"]

    # --- Deprecated method names ---
    def file_extensions(self):
@@ -17,7 +17,7 @@ from .pipeline import (
from .lib import (
    maintained_selection,
    get_extension_manifest_path,
    get_asset_settings,
    get_folder_settings,
    set_settings
)


@@ -37,7 +37,7 @@ __all__ = [
    # lib
    "maintained_selection",
    "get_extension_manifest_path",
    "get_asset_settings",
    "get_folder_settings",
    "set_settings",

    # plugin


@@ -286,22 +286,23 @@ class AfterEffectsRoute(WebSocketRoute):

    # This method calls function on the client side
    # client functions
    async def set_context(self, project, asset, task):
    async def set_context(self, project, folder, task):
        """
        Sets 'project' and 'asset' to envs, eg. setting context
        Sets 'project', 'folder' and 'task' to envs, eg. setting context

        Args:
            project (str)
            asset (str)
            folder (str)
            task (str)
        """
        log.info("Setting context change")
        log.info("project {} asset {} ".format(project, asset))
        log.info("project {} folder {} ".format(project, folder))
        if project:
            os.environ["AVALON_PROJECT"] = project
        if asset:
            os.environ["AVALON_ASSET"] = asset
            os.environ["AYON_PROJECT_NAME"] = project
        if folder:
            os.environ["AYON_FOLDER_PATH"] = folder
        if task:
            os.environ["AVALON_TASK"] = task
            os.environ["AYON_TASK_NAME"] = task

    async def read(self):
        log.debug("aftereffects.read client calls server server calls "
@@ -4,8 +4,10 @@ import json
import contextlib
import logging

import ayon_api

from ayon_core.pipeline.context_tools import get_current_context
from ayon_core.client import get_asset_by_name

from .ws_stub import get_stub

log = logging.getLogger(__name__)


@@ -85,21 +87,21 @@ def get_background_layers(file_url):
    return layers


def get_asset_settings(asset_doc):
    """Get settings on current asset from database.
def get_folder_settings(folder_entity):
    """Get settings of current folder.

    Returns:
        dict: Scene data.

    """
    asset_data = asset_doc["data"]
    fps = asset_data.get("fps", 0)
    frame_start = asset_data.get("frameStart", 0)
    frame_end = asset_data.get("frameEnd", 0)
    handle_start = asset_data.get("handleStart", 0)
    handle_end = asset_data.get("handleEnd", 0)
    resolution_width = asset_data.get("resolutionWidth", 0)
    resolution_height = asset_data.get("resolutionHeight", 0)
    folder_attributes = folder_entity["attrib"]
    fps = folder_attributes.get("fps", 0)
    frame_start = folder_attributes.get("frameStart", 0)
    frame_end = folder_attributes.get("frameEnd", 0)
    handle_start = folder_attributes.get("handleStart", 0)
    handle_end = folder_attributes.get("handleEnd", 0)
    resolution_width = folder_attributes.get("resolutionWidth", 0)
    resolution_height = folder_attributes.get("resolutionHeight", 0)
    duration = (frame_end - frame_start + 1) + handle_start + handle_end

    return {


@@ -127,9 +129,11 @@ def set_settings(frames, resolution, comp_ids=None, print_msg=True):
    frame_start = frames_duration = fps = width = height = None
    current_context = get_current_context()

    asset_doc = get_asset_by_name(current_context["project_name"],
                                  current_context["asset_name"])
    settings = get_asset_settings(asset_doc)
    folder_entity = ayon_api.get_folder_by_path(
        current_context["project_name"],
        current_context["folder_path"]
    )
    settings = get_folder_settings(folder_entity)

    msg = ''
    if frames:
@@ -9,6 +9,8 @@ from ayon_core.pipeline import (
    register_loader_plugin_path,
    register_creator_plugin_path,
    AVALON_CONTAINER_ID,
    AVALON_INSTANCE_ID,
    AYON_INSTANCE_ID,
)
from ayon_core.hosts.aftereffects.api.workfile_template_builder import (
    AEPlaceholderLoadPlugin,


@@ -142,7 +144,9 @@ class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
        layers_meta = stub.get_metadata()

        for instance in layers_meta:
            if instance.get("id") == "pyblish.avalon.instance":
            if instance.get("id") in {
                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
            }:
                instances.append(instance)
        return instances


@@ -267,7 +271,7 @@ def containerise(name,
        "name": name,
        "namespace": namespace,
        "loader": str(loader),
        "representation": str(context["representation"]["_id"]),
        "representation": context["representation"]["id"],
        "members": comp.members or [comp.id]
    }

@@ -11,7 +11,7 @@ from ayon_core.pipeline import (
from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances
from ayon_core.hosts.aftereffects.api.lib import set_settings
from ayon_core.lib import prepare_template_data
from ayon_core.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
from ayon_core.pipeline.create import PRODUCT_NAME_ALLOWED_SYMBOLS


class RenderCreator(Creator):


@@ -22,7 +22,7 @@ class RenderCreator(Creator):
    """
    identifier = "render"
    label = "Render"
    family = "render"
    product_type = "render"
    description = "Render creator"

    create_allow_context_change = True


@@ -31,7 +31,7 @@ class RenderCreator(Creator):
    mark_for_review = True
    force_setting_values = True

    def create(self, subset_name_from_ui, data, pre_create_data):
    def create(self, product_name, data, pre_create_data):
        stub = api.get_stub()  # only after After Effects is up

        try:


@@ -58,33 +58,37 @@ class RenderCreator(Creator):
                                     len(comps) > 1)
        for comp in comps:
            composition_name = re.sub(
                "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
                "[^{}]+".format(PRODUCT_NAME_ALLOWED_SYMBOLS),
                "",
                comp.name
            )
            if use_composition_name:
                if "{composition}" not in subset_name_from_ui.lower():
                    subset_name_from_ui += "{Composition}"
                if "{composition}" not in product_name.lower():
                    product_name += "{Composition}"

                dynamic_fill = prepare_template_data({"composition":
                                                      composition_name})
                subset_name = subset_name_from_ui.format(**dynamic_fill)
                comp_product_name = product_name.format(**dynamic_fill)
                data["composition_name"] = composition_name
            else:
                subset_name = subset_name_from_ui
                subset_name = re.sub(r"\{composition\}", '', subset_name,
                                     flags=re.IGNORECASE)
                comp_product_name = re.sub(
                    r"\{composition\}",
                    "",
                    product_name,
                    flags=re.IGNORECASE
                )

            for inst in self.create_context.instances:
                if subset_name == inst.subset_name:
                if comp_product_name == inst.product_name:
                    raise CreatorError("{} already exists".format(
                        inst.subset_name))
                        inst.product_name))

            data["members"] = [comp.id]
            data["orig_comp_name"] = composition_name

            new_instance = CreatedInstance(self.family, subset_name, data,
                                           self)
            new_instance = CreatedInstance(
                self.product_type, comp_product_name, data, self
            )
            if "farm" in pre_create_data:
                use_farm = pre_create_data["farm"]
                new_instance.creator_attributes["farm"] = use_farm


@@ -96,7 +100,7 @@ class RenderCreator(Creator):
                                   new_instance.data_to_store())
            self._add_instance_to_context(new_instance)

            stub.rename_item(comp.id, subset_name)
            stub.rename_item(comp.id, comp_product_name)
            if self.force_setting_values:
                set_settings(True, True, [comp.id], print_msg=False)


@@ -107,7 +111,7 @@ class RenderCreator(Creator):
                    "selected by default.",
                    default=True, label="Use selection"),
            BoolDef("use_composition_name",
                    label="Use composition name in subset"),
                    label="Use composition name in product"),
            UISeparatorDef(),
            BoolDef("farm", label="Render on farm"),
            BoolDef(


@@ -133,9 +137,14 @@ class RenderCreator(Creator):

    def collect_instances(self):
        for instance_data in cache_and_get_instances(self):
            # legacy instances have family=='render' or 'renderLocal', use them
            creator_id = (instance_data.get("creator_identifier") or
                          instance_data.get("family", '').replace("Local", ''))
            # legacy instances have product_type=='render' or 'renderLocal', use them
            creator_id = instance_data.get("creator_identifier")
            if not creator_id:
                # NOTE this is for backwards compatibility but probably can be
                # removed
                creator_id = instance_data.get("family", "")
                creator_id = creator_id.replace("Local", "")

            if creator_id == self.identifier:
                instance_data = self._handle_legacy(instance_data)
                instance = CreatedInstance.from_existing(


@@ -147,10 +156,10 @@ class RenderCreator(Creator):
        for created_inst, _changes in update_list:
            api.get_stub().imprint(created_inst.get("instance_id"),
                                   created_inst.data_to_store())
            subset_change = _changes.get("subset")
            if subset_change:
            name_change = _changes.get("productName")
            if name_change:
                api.get_stub().rename_item(created_inst.data["members"][0],
                                           subset_change.new_value)
                                           name_change.new_value)

    def remove_instances(self, instances):
        """Removes metadata and renames to original comp name if available."""
@@ -183,33 +192,40 @@ class RenderCreator(Creator):
    def get_detail_description(self):
        return """Creator for Render instances

        Main publishable item in AfterEffects will be of `render` family.
        Main publishable item in AfterEffects will be of `render` product type.
        Result of this item (instance) is picture sequence or video that could
        be a final delivery product or loaded and used in another DCCs.

        Select single composition and create instance of 'render' family or
        turn off 'Use selection' to create instance for all compositions.
        Select single composition and create instance of 'render' product type
        or turn off 'Use selection' to create instance for all compositions.

        'Use composition name in subset' allows to explicitly add composition
        name into created subset name.
        'Use composition name in product' allows to explicitly add composition
        name into created product name.

        Position of composition name could be set in
        `project_settings/global/tools/creator/subset_name_profiles` with some
        form of '{composition}' placeholder.
        `project_settings/global/tools/creator/product_name_profiles` with
        some form of '{composition}' placeholder.

        Composition name will be used implicitly if multiple compositions
        should be handled at the same time.

        If {composition} placeholder is not used in 'subset_name_profiles'
        composition name will be capitalized and set at the end of subset name
        if necessary.
        If {composition} placeholder is not used in 'product_name_profiles'
        composition name will be capitalized and set at the end of
        product name if necessary.

        If composition name should be used, it will be cleaned up of characters
        that would cause an issue in published file names.
        """

    def get_dynamic_data(self, variant, task_name, asset_doc,
                         project_name, host_name, instance):
    def get_dynamic_data(
        self,
        project_name,
        folder_entity,
        task_entity,
        variant,
        host_name,
        instance
    ):
        dynamic_data = {}
        if instance is not None:
            composition_name = instance.get("composition_name")


@@ -234,9 +250,9 @@ class RenderCreator(Creator):
        instance_data["task"] = self.create_context.get_current_task_name()

        if not instance_data.get("creator_attributes"):
            is_old_farm = instance_data["family"] != "renderLocal"
            is_old_farm = instance_data.get("family") != "renderLocal"
            instance_data["creator_attributes"] = {"farm": is_old_farm}
        instance_data["family"] = self.family
        instance_data["productType"] = self.product_type

        if instance_data["creator_attributes"].get("mark_for_review") is None:
            instance_data["creator_attributes"]["mark_for_review"] = True
@@ -1,5 +1,6 @@
import ayon_api

import ayon_core.hosts.aftereffects.api as api
from ayon_core.client import get_asset_by_name
from ayon_core.pipeline import (
    AutoCreator,
    CreatedInstance

@@ -9,7 +10,7 @@ from ayon_core.hosts.aftereffects.api.pipeline import cache_and_get_instances

class AEWorkfileCreator(AutoCreator):
    identifier = "workfile"
    family = "workfile"
    product_type = "workfile"

    default_variant = "Main"

@@ -20,9 +21,9 @@ class AEWorkfileCreator(AutoCreator):
        for instance_data in cache_and_get_instances(self):
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                subset_name = instance_data["subset"]
                product_name = instance_data["productName"]
                instance = CreatedInstance(
                    self.family, subset_name, instance_data, self
                    self.product_type, product_name, instance_data, self
                )
                self._add_instance_to_context(instance)

@@ -33,38 +34,50 @@ class AEWorkfileCreator(AutoCreator):
    def create(self, options=None):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
            if instance.product_type == self.product_type:
                existing_instance = instance
                break

        context = self.create_context
        project_name = context.get_current_project_name()
        asset_name = context.get_current_asset_name()
        folder_path = context.get_current_folder_path()
        task_name = context.get_current_task_name()
        host_name = context.host_name

        existing_asset_name = None
        existing_folder_path = None
        if existing_instance is not None:
            existing_asset_name = existing_instance.get("folderPath")
            existing_folder_path = existing_instance.get("folderPath")

        if existing_instance is None:
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                self.default_variant,
                host_name,
            )
            data = {
                "folderPath": asset_name,
                "folderPath": folder_path,
                "task": task_name,
                "variant": self.default_variant,
            }
            data.update(self.get_dynamic_data(
                self.default_variant, task_name, asset_doc,
                project_name, host_name, None
                project_name,
                folder_entity,
                task_entity,
                self.default_variant,
                host_name,
                None,
            ))

            new_instance = CreatedInstance(
                self.family, subset_name, data, self
                self.product_type, product_name, data, self
            )
            self._add_instance_to_context(new_instance)

@@ -72,14 +85,22 @@ class AEWorkfileCreator(AutoCreator):
                new_instance.data_to_store())

        elif (
            existing_asset_name != asset_name
            existing_folder_path != folder_path
            or existing_instance["task"] != task_name
        ):
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                self.default_variant, task_name, asset_doc,
                project_name, host_name
            folder_entity = ayon_api.get_folder_by_path(
                project_name, folder_path
            )
            existing_instance["folderPath"] = asset_name
            task_entity = ayon_api.get_task_by_name(
                project_name, folder_entity["id"], task_name
            )
            product_name = self.get_product_name(
                project_name,
                folder_entity,
                task_entity,
                self.default_variant,
                host_name,
            )
            existing_instance["folderPath"] = folder_path
            existing_instance["task"] = task_name
            existing_instance["subset"] = subset_name
            existing_instance["productName"] = product_name

@@ -11,7 +11,7 @@ from ayon_core.hosts.aftereffects.api.lib import (

class BackgroundLoader(api.AfterEffectsLoader):
    """
    Load images from Background family
    Load images from Background product type
    Creates for each background separate folder with all imported images
    from background json AND automatically created composition with layers,
    each layer for separate image.

@@ -31,7 +31,7 @@ class BackgroundLoader(api.AfterEffectsLoader):

        comp_name = get_unique_layer_name(
            existing_items,
            "{}_{}".format(context["asset"]["name"], name))
            "{}_{}".format(context["folder"]["name"], name))

        path = self.filepath_from_context(context)
        layers = get_background_layers(path)

@@ -56,16 +56,19 @@ class BackgroundLoader(api.AfterEffectsLoader):
            self.__class__.__name__
        )

    def update(self, container, representation):
    def update(self, container, context):
        """ Switch asset or change version """
        stub = self.get_stub()
        context = representation.get("context", {})
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        repre_entity = context["representation"]

        _ = container.pop("layer")

        # without iterator number (_001, 002...)
        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        comp_name = "{}_{}".format(context["asset"], context["subset"])
        comp_name = "{}_{}".format(folder_name, product_name)

        # switching assets
        if namespace_from_container != comp_name:
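As a quick illustration of the comparison above (sample names assumed): the trailing three-digit counter is stripped from the container namespace before it is compared with the freshly built composition name.

import re

# "_001"-style suffixes are dropped; names without a counter pass through.
print(re.sub(r'_\d{3}$', '', "characterA_imageBg_003"))  # characterA_imageBg
print(re.sub(r'_\d{3}$', '', "characterA_imageBg"))      # characterA_imageBg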
@@ -73,11 +76,11 @@ class BackgroundLoader(api.AfterEffectsLoader):
            existing_items = [layer.name for layer in items]
            comp_name = get_unique_layer_name(
                existing_items,
                "{}_{}".format(context["asset"], context["subset"]))
                "{}_{}".format(folder_name, product_name))
        else:  # switching version - keep same name
            comp_name = container["namespace"]

        path = get_representation_path(representation)
        path = get_representation_path(repre_entity)

        layers = get_background_layers(path)
        comp = stub.reload_background(container["members"][1],

@@ -85,8 +88,8 @@ class BackgroundLoader(api.AfterEffectsLoader):
                                      layers)

        # update container
        container["representation"] = str(representation["_id"])
        container["name"] = context["subset"]
        container["representation"] = repre_entity["id"]
        container["name"] = product_name
        container["namespace"] = comp_name
        container["members"] = comp.members

@@ -104,5 +107,5 @@ class BackgroundLoader(api.AfterEffectsLoader):
            stub.imprint(layer.id, {})
            stub.delete_item(layer.id)

    def switch(self, container, representation):
        self.update(container, representation)
    def switch(self, container, context):
        self.update(container, context)

@@ -25,7 +25,10 @@ class FileLoader(api.AfterEffectsLoader):
        layers = stub.get_items(comps=True, folders=True, footages=True)
        existing_layers = [layer.name for layer in layers]
        comp_name = get_unique_layer_name(
            existing_layers, "{}_{}".format(context["asset"]["name"], name))
            existing_layers, "{}_{}".format(
                context["folder"]["name"], name
            )
        )

        import_options = {}

@@ -35,7 +38,7 @@ class FileLoader(api.AfterEffectsLoader):
            import_options['sequence'] = True

        if not path:
            repr_id = context["representation"]["_id"]
            repr_id = context["representation"]["id"]
            self.log.warning(
                "Representation id `{}` is failing to load".format(repr_id))
            return

@@ -64,31 +67,33 @@ class FileLoader(api.AfterEffectsLoader):
            self.__class__.__name__
        )

    def update(self, container, representation):
    def update(self, container, context):
        """ Switch asset or change version """
        stub = self.get_stub()
        layer = container.pop("layer")

        context = representation.get("context", {})
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        repre_entity = context["representation"]

        namespace_from_container = re.sub(r'_\d{3}$', '',
                                          container["namespace"])
        layer_name = "{}_{}".format(context["asset"], context["subset"])
        layer_name = "{}_{}".format(folder_name, product_name)
        # switching assets
        if namespace_from_container != layer_name:
            layers = stub.get_items(comps=True)
            existing_layers = [layer.name for layer in layers]
            layer_name = get_unique_layer_name(
                existing_layers,
                "{}_{}".format(context["asset"], context["subset"]))
                "{}_{}".format(folder_name, product_name))
        else:  # switching version - keep same name
            layer_name = container["namespace"]
        path = get_representation_path(representation)
        path = get_representation_path(repre_entity)
        # with aftereffects.maintained_selection():  # TODO
        stub.replace_item(layer.id, path, stub.LOADED_ICON + layer_name)
        stub.imprint(
            layer.id, {"representation": str(representation["_id"]),
                       "name": context["subset"],
            layer.id, {"representation": repre_entity["id"],
                       "name": product_name,
                       "namespace": layer_name}
        )

@@ -103,5 +108,5 @@ class FileLoader(api.AfterEffectsLoader):
            stub.imprint(layer.id, {})
            stub.delete_item(layer.id)

    def switch(self, container, representation):
        self.update(container, representation)
    def switch(self, container, context):
        self.update(container, context)

@@ -60,8 +60,8 @@ class CollectAERender(publish.AbstractCollectRender):
            if not inst.data.get("active", True):
                continue

            family = inst.data["family"]
            if family not in ["render", "renderLocal"]:  # legacy
            product_type = inst.data["productType"]
            if product_type not in ["render", "renderLocal"]:  # legacy
                continue

            comp_id = int(inst.data["members"][0])

@@ -81,29 +81,32 @@ class CollectAERender(publish.AbstractCollectRender):
            fps = comp_info.frameRate
            # TODO add resolution when supported by extension

            task_name = inst.data.get("task")  # legacy
            task_name = inst.data.get("task")

            render_q = CollectAERender.get_stub().get_render_info(comp_id)
            if not render_q:
                raise ValueError("No file extension set in Render Queue")
            render_item = render_q[0]

            product_type = "render"
            instance_families = inst.data.get("families", [])
            subset_name = inst.data["subset"]
            instance_families.append(product_type)
            product_name = inst.data["productName"]
            instance = AERenderInstance(
                family="render",
                productType=product_type,
                family=product_type,
                families=instance_families,
                version=version,
                time="",
                source=current_file,
                label="{} - {}".format(subset_name, family),
                subset=subset_name,
                asset=inst.data["asset"],
                label="{} - {}".format(product_name, product_type),
                productName=product_name,
                folderPath=inst.data["folderPath"],
                task=task_name,
                attachTo=False,
                setMembers='',
                publish=True,
                name=subset_name,
                name=product_name,
                resolutionWidth=render_item.width,
                resolutionHeight=render_item.height,
                pixelAspect=1,

@@ -175,8 +178,8 @@ class CollectAERender(publish.AbstractCollectRender):
        version_str = "v{:03d}".format(render_instance.version)
        if "#" not in file_name:  # single frame (mov)
            path = os.path.join(base_dir, "{}_{}_{}.{}".format(
                render_instance.asset,
                render_instance.subset,
                render_instance.folderPath,
                render_instance.productName,
                version_str,
                ext
            ))

@@ -184,8 +187,8 @@ class CollectAERender(publish.AbstractCollectRender):
        else:
            for frame in range(start, end + 1):
                path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
                    render_instance.asset,
                    render_instance.subset,
                    render_instance.folderPath,
                    render_instance.productName,
                    version_str,
                    str(frame).zfill(self.padding_width),
                    ext

@@ -3,7 +3,7 @@ Requires:
    None

Provides:
    instance -> family ("review")
    instance -> families ("review")
"""
import pyblish.api

@@ -2,9 +2,6 @@ import os

import pyblish.api

from ayon_core.client import get_asset_name_identifier
from ayon_core.pipeline.create import get_subset_name


class CollectWorkfile(pyblish.api.ContextPlugin):
    """ Adds the AE render instances """

@@ -15,86 +12,24 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
    default_variant = "Main"

    def process(self, context):
        existing_instance = None
        workfile_instance = None
        for instance in context:
            if instance.data["family"] == "workfile":
                self.log.debug("Workfile instance found, won't create new")
                existing_instance = instance
            if instance.data["productType"] == "workfile":
                self.log.debug("Workfile instance found")
                workfile_instance = instance
                break

        current_file = context.data["currentFile"]
        staging_dir = os.path.dirname(current_file)
        scene_file = os.path.basename(current_file)
        if existing_instance is None:  # old publish
            instance = self._get_new_instance(context, scene_file)
        else:
            instance = existing_instance
        if workfile_instance is None:
            self.log.debug("Workfile instance not found. Skipping")
            return

        # creating representation
        representation = {
            'name': 'aep',
            'ext': 'aep',
            'files': scene_file,
        workfile_instance.data["representations"].append({
            "name": "aep",
            "ext": "aep",
            "files": scene_file,
            "stagingDir": staging_dir,
        }
        })

        if not instance.data.get("representations"):
            instance.data["representations"] = []
        instance.data["representations"].append(representation)

        instance.data["publish"] = instance.data["active"]  # for DL

    def _get_new_instance(self, context, scene_file):
        task = context.data["task"]
        version = context.data["version"]
        asset_entity = context.data["assetEntity"]
        project_entity = context.data["projectEntity"]

        asset_name = get_asset_name_identifier(asset_entity)

        instance_data = {
            "active": True,
            "asset": asset_name,
            "task": task,
            "frameStart": context.data['frameStart'],
            "frameEnd": context.data['frameEnd'],
            "handleStart": context.data['handleStart'],
            "handleEnd": context.data['handleEnd'],
            "fps": asset_entity["data"]["fps"],
            "resolutionWidth": asset_entity["data"].get(
                "resolutionWidth",
                project_entity["data"]["resolutionWidth"]),
            "resolutionHeight": asset_entity["data"].get(
                "resolutionHeight",
                project_entity["data"]["resolutionHeight"]),
            "pixelAspect": 1,
            "step": 1,
            "version": version
        }

        # workfile instance
        family = "workfile"
        subset = get_subset_name(
            family,
            self.default_variant,
            context.data["anatomyData"]["task"]["name"],
            context.data["assetEntity"],
            context.data["anatomyData"]["project"]["name"],
            host_name=context.data["hostName"],
            project_settings=context.data["project_settings"]
        )
        # Create instance
        instance = context.create_instance(subset)

        # creating instance data
        instance.data.update({
            "subset": subset,
            "label": scene_file,
            "family": family,
            "families": [family],
            "representations": list()
        })

        instance.data.update(instance_data)

        return instance

@@ -1,11 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Subset context</title>
        <title>Product context</title>
        <description>
## Invalid subset context
## Invalid product context

Context of the given subset doesn't match your current scene.
Context of the given product doesn't match your current scene.

### How to repair?

@@ -15,7 +15,7 @@ You can fix this with "repair" button on the right and refresh Publish at the bo
### __Detailed Info__ (optional)

This might happen if you reuse an old workfile and open it in a different context.
(Eg. you created subset "renderCompositingDefault" from asset "Robot' in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but existing subset for "Robot" asset stayed in the workfile.)
(Eg. you created product name "renderCompositingDefault" from folder "Robot" in "your_project_Robot_compositing.aep", now you opened this workfile in a context "Sloth" but the existing product for the "Robot" folder stayed in the workfile.)
</detail>
</error>
</root>

@@ -5,20 +5,20 @@
        <description>
## Invalid scene setting found

One of the settings in a scene doesn't match to asset settings in database.
One of the settings in a scene doesn't match the folder settings in the database.

{invalid_setting_str}

### How to repair?

Change values for {invalid_keys_str} in the scene OR change them in the asset database if they are wrong there.
Change values for {invalid_keys_str} in the scene OR change them on the folder in the database if they are wrong there.

In the scene it is right mouse click on published composition > `Composition Settings`.
</description>
<detail>
### __Detailed Info__ (optional)

This error is shown when for example resolution in the scene doesn't match to resolution set on the asset in the database.
This error is shown when, for example, the resolution in the scene doesn't match the resolution set on the folder in the database.
Either value in the database or in the scene is wrong.
</detail>
</error>

@@ -1,54 +0,0 @@
import json
import pyblish.api
from ayon_core.hosts.aftereffects.api import AfterEffectsHost


class PreCollectRender(pyblish.api.ContextPlugin):
    """
    Checks if render instance is of old type, adds to families to both
    existing collectors work same way.

    Could be removed in the future when no one uses old publish.
    """

    label = "PreCollect Render"
    order = pyblish.api.CollectorOrder + 0.400
    hosts = ["aftereffects"]

    family_remapping = {
        "render": ("render.farm", "farm"),  # (family, label)
        "renderLocal": ("render.local", "local")
    }

    def process(self, context):
        if context.data.get("newPublishing"):
            self.log.debug("Not applicable for New Publisher, skip")
            return

        for inst in AfterEffectsHost().list_instances():
            if inst.get("creator_attributes"):
                raise ValueError("Instance created in New publisher, "
                                 "cannot be published in Pyblish.\n"
                                 "Please publish in New Publisher "
                                 "or recreate instances with legacy Creators")

            if inst["family"] not in self.family_remapping.keys():
                continue

            if not inst["members"]:
                raise ValueError("Couldn't find id, unable to publish. " +
                                 "Please recreate instance.")

            instance = context.create_instance(inst["subset"])
            inst["families"] = [self.family_remapping[inst["family"]][0]]
            instance.data.update(inst)

            self._debug_log(instance)

    def _debug_log(self, instance):
        def _default_json(value):
            return str(value)

        self.log.info(
            json.dumps(instance.data, indent=4, default=_default_json)
        )

@@ -1,6 +1,6 @@
import pyblish.api

from ayon_core.pipeline import get_current_asset_name
from ayon_core.pipeline import get_current_folder_path
from ayon_core.pipeline.publish import (
    ValidateContentsOrder,
    PublishXmlValidationError,

@@ -8,8 +8,8 @@ from ayon_core.pipeline.publish import (
from ayon_core.hosts.aftereffects.api import get_stub


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset with value from Context."""
class ValidateInstanceFolderRepair(pyblish.api.Action):
    """Repair the instance folder with value from Context."""

    label = "Repair"
    icon = "wrench"

@@ -30,35 +30,35 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
        for instance in instances:
            data = stub.read(instance[0])

            data["asset"] = get_current_asset_name()
            data["folderPath"] = get_current_folder_path()
            stub.imprint(instance[0].instance_id, data)


class ValidateInstanceAsset(pyblish.api.InstancePlugin):
    """Validate the instance asset is the current selected context asset.
class ValidateInstanceFolder(pyblish.api.InstancePlugin):
    """Validate the instance folder is the current selected context folder.

    As it might happen that multiple workfiles are opened at the same time,
    switching between them would mess with selected context. (From Launcher
    or Ftrack).

    In that case outputs might be output under wrong asset!
    In that case outputs might be output under wrong folder!

    Repair action will use Context asset value (from Workfiles or Launcher)
    Repair action will use Context folder value (from Workfiles or Launcher)
    Closing and reopening with Workfiles will refresh Context value.
    """

    label = "Validate Instance Asset"
    label = "Validate Instance Folder"
    hosts = ["aftereffects"]
    actions = [ValidateInstanceAssetRepair]
    actions = [ValidateInstanceFolderRepair]
    order = ValidateContentsOrder

    def process(self, instance):
        instance_asset = instance.data["asset"]
        current_asset = get_current_asset_name()
        instance_folder = instance.data["folderPath"]
        current_folder = get_current_folder_path()
        msg = (
            f"Instance asset {instance_asset} is not the same "
            f"as current context {current_asset}."
            f"Instance folder {instance_folder} is not the same "
            f"as current context {current_folder}."
        )

        if instance_asset != current_asset:
        if instance_folder != current_folder:
            raise PublishXmlValidationError(self, msg)

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Validate scene settings.
Requires:
    instance -> assetEntity
    instance -> folderEntity
    instance -> anatomyData
"""
import os

@@ -13,7 +13,7 @@ from ayon_core.pipeline import (
    PublishXmlValidationError,
    OptionalPyblishPluginMixin
)
from ayon_core.hosts.aftereffects.api import get_asset_settings
from ayon_core.hosts.aftereffects.api import get_folder_settings


class ValidateSceneSettings(OptionalPyblishPluginMixin,

@@ -48,7 +48,7 @@ class ValidateSceneSettings(OptionalPyblishPluginMixin,
        fps
        handleStart
        handleEnd
        skip_resolution_check - fill entity type ('asset') to skip validation
        skip_resolution_check - fill entity type ('folder') to skip validation
            resolutionWidth
            resolutionHeight
            TODO support in extension is missing for now

@@ -71,11 +71,11 @@ class ValidateSceneSettings(OptionalPyblishPluginMixin,
        if not self.is_active(instance.data):
            return

        asset_doc = instance.data["assetEntity"]
        expected_settings = get_asset_settings(asset_doc)
        folder_entity = instance.data["folderEntity"]
        expected_settings = get_folder_settings(folder_entity)
        self.log.info("config from DB::{}".format(expected_settings))

        task_name = instance.data["anatomyData"]["task"]["name"]
        task_name = instance.data["task"]
        if any(re.search(pattern, task_name)
               for pattern in self.skip_resolution_check):
            expected_settings.pop("resolutionWidth")

@@ -16,7 +16,7 @@ import bpy
import bpy.utils.previews

from ayon_core import style
from ayon_core.pipeline import get_current_asset_name, get_current_task_name
from ayon_core.pipeline import get_current_folder_path, get_current_task_name
from ayon_core.tools.utils import host_tools

from .workio import OpenFileCacher

@@ -355,7 +355,7 @@ class SetFrameRange(bpy.types.Operator):
    bl_label = "Set Frame Range"

    def execute(self, context):
        data = pipeline.get_asset_data()
        data = pipeline.get_folder_attributes()
        pipeline.set_frame_range(data)
        return {"FINISHED"}

@@ -365,7 +365,7 @@ class SetResolution(bpy.types.Operator):
    bl_label = "Set Resolution"

    def execute(self, context):
        data = pipeline.get_asset_data()
        data = pipeline.get_folder_attributes()
        pipeline.set_resolution(data)
        return {"FINISHED"}

@@ -388,9 +388,9 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
        else:
            pyblish_menu_icon_id = 0

        asset = get_current_asset_name()
        task = get_current_task_name()
        context_label = f"{asset}, {task}"
        folder_path = get_current_folder_path()
        task_name = get_current_task_name()
        context_label = f"{folder_path}, {task_name}"
        context_label_item = layout.row()
        context_label_item.operator(
            LaunchWorkFiles.bl_idname, text=context_label

@@ -9,6 +9,7 @@ from . import lib
from . import ops

import pyblish.api
import ayon_api

from ayon_core.host import (
    HostBase,

@@ -16,16 +17,16 @@ from ayon_core.host import (
    IPublishHost,
    ILoadHost
)
from ayon_core.client import get_asset_by_name
from ayon_core.pipeline import (
    schema,
    get_current_project_name,
    get_current_asset_name,
    get_current_folder_path,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
    AYON_CONTAINER_ID,
)
from ayon_core.lib import (
    Logger,

@@ -220,12 +221,12 @@ def message_window(title, message):
    _process_app_events()


def get_asset_data():
def get_folder_attributes():
    project_name = get_current_project_name()
    asset_name = get_current_asset_name()
    asset_doc = get_asset_by_name(project_name, asset_name)
    folder_path = get_current_folder_path()
    folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)

    return asset_doc.get("data")
    return folder_entity["attrib"]


def set_frame_range(data):

@@ -272,13 +273,13 @@ def set_resolution(data):


def on_new():
    project = os.environ.get("AVALON_PROJECT")
    project = os.environ.get("AYON_PROJECT_NAME")
    settings = get_project_settings(project).get("blender")

    set_resolution_startup = settings.get("set_resolution_startup")
    set_frames_startup = settings.get("set_frames_startup")

    data = get_asset_data()
    data = get_folder_attributes()

    if set_resolution_startup:
        set_resolution(data)

@@ -293,13 +294,13 @@ def on_new():


def on_open():
    project = os.environ.get("AVALON_PROJECT")
    project = os.environ.get("AYON_PROJECT_NAME")
    settings = get_project_settings(project).get("blender")

    set_resolution_startup = settings.get("set_resolution_startup")
    set_frames_startup = settings.get("set_frames_startup")

    data = get_asset_data()
    data = get_folder_attributes()

    if set_resolution_startup:
        set_resolution(data)

@@ -379,7 +380,7 @@ def _on_task_changed():
    # `directory` attribute, so it opens in that directory (does it?).
    # https://docs.blender.org/api/blender2.8/bpy.types.Operator.html#calling-a-file-selector
    # https://docs.blender.org/api/blender2.8/bpy.types.WindowManager.html#bpy.types.WindowManager.fileselect_add
    workdir = os.getenv("AVALON_WORKDIR")
    workdir = os.getenv("AYON_WORKDIR")
    log.debug("New working directory: %s", workdir)

@@ -467,7 +468,7 @@ def containerise(name: str,

    """

    node_name = f"{context['asset']['name']}_{name}"
    node_name = f"{context['folder']['name']}_{name}"
    if namespace:
        node_name = f"{namespace}:{node_name}"
    if suffix:

@@ -483,7 +484,7 @@ def containerise(name: str,
        "name": name,
        "namespace": namespace or '',
        "loader": str(loader),
        "representation": str(context["representation"]["_id"]),
        "representation": context["representation"]["id"],
    }

    metadata_update(container, data)

@@ -522,7 +523,7 @@ def containerise_existing(
        "name": name,
        "namespace": namespace or '',
        "loader": str(loader),
        "representation": str(context["representation"]["_id"]),
        "representation": context["representation"]["id"],
    }

    metadata_update(container, data)

@@ -563,8 +564,9 @@ def ls() -> Iterator:
    called containers.
    """

    for container in lib.lsattr("id", AVALON_CONTAINER_ID):
        yield parse_container(container)
    for id_type in {AYON_CONTAINER_ID, AVALON_CONTAINER_ID}:
        for container in lib.lsattr("id", id_type):
            yield parse_container(container)


def publish():

@@ -10,6 +10,8 @@ from ayon_core.pipeline import (
    Creator,
    CreatedInstance,
    LoaderPlugin,
    AVALON_INSTANCE_ID,
    AYON_INSTANCE_ID,
)
from ayon_core.lib import BoolDef

@@ -28,13 +30,13 @@ VALID_EXTENSIONS = [".blend", ".json", ".abc", ".fbx"]


def prepare_scene_name(
    asset: str, subset: str, namespace: Optional[str] = None
    folder_name: str, product_name: str, namespace: Optional[str] = None
) -> str:
    """Return a consistent name for an asset."""
    name = f"{asset}"
    name = f"{folder_name}"
    if namespace:
        name = f"{name}_{namespace}"
    name = f"{name}_{subset}"
    name = f"{name}_{product_name}"

    # Blender name for a collection or object cannot be longer than 63
    # characters. If the name is longer, it will raise an error.

@@ -45,9 +47,9 @@ def prepare_scene_name(


def get_unique_number(
    asset: str, subset: str
    folder_name: str, product_name: str
) -> str:
    """Return a unique number based on the asset name."""
    """Return a unique number based on the folder name."""
    avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
    if not avalon_container:
        return "01"

@@ -62,10 +64,10 @@ def get_unique_number(
        if c.get(AVALON_PROPERTY)}
    container_names = obj_group_names.union(coll_group_names)
    count = 1
    name = f"{asset}_{count:0>2}_{subset}"
    name = f"{folder_name}_{count:0>2}_{product_name}"
    while name in container_names:
        count += 1
        name = f"{asset}_{count:0>2}_{subset}"
        name = f"{folder_name}_{count:0>2}_{product_name}"
    return f"{count:0>2}"
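A pure-Python sketch of what get_unique_number() computes; the container names are assumed here, while in Blender they come from the AVALON_CONTAINERS collection.

# First free two-digit suffix for a folder/product pair.
container_names = {"Robot_01_modelMain", "Robot_02_modelMain"}
count = 1
name = f"Robot_{count:0>2}_modelMain"
while name in container_names:
    count += 1
    name = f"Robot_{count:0>2}_modelMain"
print(f"{count:0>2}")  # -> 03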
@@ -159,24 +161,22 @@ class BaseCreator(Creator):
    create_as_asset_group = False

    @staticmethod
    def cache_subsets(shared_data):
    def cache_instance_data(shared_data):
        """Cache instances for Creators shared data.

        Create `blender_cached_subsets` key when needed in shared data and
        Create `blender_cached_instances` key when needed in shared data and
        fill it with all collected instances from the scene under its
        respective creator identifiers.

        If legacy instances are detected in the scene, create
        `blender_cached_legacy_subsets` key and fill it with
        all legacy subsets from this family as a value.  # key or value?
        `blender_cached_legacy_instances` key and fill it with
        all legacy products from this family as a value.  # key or value?

        Args:
            shared_data(Dict[str, Any]): Shared data.

        Return:
            Dict[str, Any]: Shared data with cached subsets.
        """
        if not shared_data.get('blender_cached_subsets'):
        if not shared_data.get('blender_cached_instances'):
            cache = {}
            cache_legacy = {}
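The shared-data layout the renamed cache produces could look roughly like this; the creator identifier and node names are assumptions for illustration.

shared_data = {
    "blender_cached_instances": {
        # creator identifier -> nodes created by new-style creators
        "io.openpype.creators.blender.camera": ["camera_node"],
    },
    "blender_cached_legacy_instances": {
        # legacy family -> nodes without a creator identifier
        "camera": ["legacy_camera_node"],
    },
}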
@@ -193,7 +193,9 @@ class BaseCreator(Creator):
                if not avalon_prop:
                    continue

                if avalon_prop.get('id') != 'pyblish.avalon.instance':
                if avalon_prop.get('id') not in {
                    AYON_INSTANCE_ID, AVALON_INSTANCE_ID
                }:
                    continue

                creator_id = avalon_prop.get('creator_identifier')

@@ -206,21 +208,21 @@ class BaseCreator(Creator):
                    # Legacy creator instance
                    cache_legacy.setdefault(family, []).append(obj_or_col)

            shared_data["blender_cached_subsets"] = cache
            shared_data["blender_cached_legacy_subsets"] = cache_legacy
            shared_data["blender_cached_instances"] = cache
            shared_data["blender_cached_legacy_instances"] = cache_legacy

        return shared_data

    def create(
        self, subset_name: str, instance_data: dict, pre_create_data: dict
        self, product_name: str, instance_data: dict, pre_create_data: dict
    ):
        """Override abstract method from Creator.
        Create new instance and store it.

        Args:
            subset_name(str): Subset name of created instance.
            instance_data(dict): Instance base data.
            pre_create_data(dict): Data based on pre creation attributes.
            product_name (str): Product name of created instance.
            instance_data (dict): Instance base data.
            pre_create_data (dict): Data based on pre creation attributes.
                Those may affect how creator works.
        """
        # Get Instance Container or create it if it does not exist

@@ -230,9 +232,9 @@ class BaseCreator(Creator):
            bpy.context.scene.collection.children.link(instances)

        # Create asset group
        asset_name = instance_data["folderPath"].split("/")[-1]
        folder_name = instance_data["folderPath"].split("/")[-1]

        name = prepare_scene_name(asset_name, subset_name)
        name = prepare_scene_name(folder_name, product_name)
        if self.create_as_asset_group:
            # Create instance as empty
            instance_node = bpy.data.objects.new(name=name, object_data=None)

@@ -243,10 +245,10 @@ class BaseCreator(Creator):
            instance_node = bpy.data.collections.new(name=name)
            instances.children.link(instance_node)

        self.set_instance_data(subset_name, instance_data)
        self.set_instance_data(product_name, instance_data)

        instance = CreatedInstance(
            self.family, subset_name, instance_data, self
            self.product_type, product_name, instance_data, self
        )
        instance.transient_data["instance_node"] = instance_node
        self._add_instance_to_context(instance)

@@ -259,18 +261,18 @@ class BaseCreator(Creator):
        """Override abstract method from BaseCreator.
        Collect existing instances related to this creator plugin."""

        # Cache subsets in shared data
        self.cache_subsets(self.collection_shared_data)
        # Cache instances in shared data
        self.cache_instance_data(self.collection_shared_data)

        # Get cached subsets
        cached_subsets = self.collection_shared_data.get(
            "blender_cached_subsets"
        # Get cached instances
        cached_instances = self.collection_shared_data.get(
            "blender_cached_instances"
        )
        if not cached_subsets:
        if not cached_instances:
            return

        # Process only instances that were created by this creator
        for instance_node in cached_subsets.get(self.identifier, []):
        for instance_node in cached_instances.get(self.identifier, []):
            property = instance_node.get(AVALON_PROPERTY)
            # Create instance object from existing data
            instance = CreatedInstance.from_existing(

@@ -302,16 +304,17 @@ class BaseCreator(Creator):
            )
            return

        # Rename the instance node in the scene if subset or asset changed.
        # Rename the instance node in the scene if product
        # or folder changed.
        # Do not rename the instance if the family is workfile, as the
        # workfile instance is included in the AVALON_CONTAINER collection.
        if (
            "subset" in changes.changed_keys
            "productName" in changes.changed_keys
            or "folderPath" in changes.changed_keys
        ) and created_instance.family != "workfile":
            asset_name = data["folderPath"].split("/")[-1]
        ) and created_instance.product_type != "workfile":
            folder_name = data["folderPath"].split("/")[-1]
            name = prepare_scene_name(
                asset=asset_name, subset=data["subset"]
                folder_name, data["productName"]
            )
            node.name = name

@@ -337,13 +340,13 @@ class BaseCreator(Creator):

    def set_instance_data(
        self,
        subset_name: str,
        product_name: str,
        instance_data: dict
    ):
        """Fill instance data with required items.

        Args:
            subset_name(str): Subset name of created instance.
            product_name(str): Product name of created instance.
            instance_data(dict): Instance base data.
            instance_node(bpy.types.ID): Instance node in blender scene.
        """

@@ -352,9 +355,9 @@ class BaseCreator(Creator):

        instance_data.update(
            {
                "id": "pyblish.avalon.instance",
                "id": AVALON_INSTANCE_ID,
                "creator_identifier": self.identifier,
                "subset": subset_name,
                "productName": product_name,
            }
        )

@@ -462,14 +465,14 @@ class AssetLoader(LoaderPlugin):
        filepath = self.filepath_from_context(context)
        assert Path(filepath).exists(), f"{filepath} doesn't exist."

        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        folder_name = context["folder"]["name"]
        product_name = context["product"]["name"]
        unique_number = get_unique_number(
            asset, subset
            folder_name, product_name
        )
        namespace = namespace or f"{asset}_{unique_number}"
        namespace = namespace or f"{folder_name}_{unique_number}"
        name = name or prepare_scene_name(
            asset, subset, unique_number
            folder_name, product_name, unique_number
        )

        nodes = self.process_asset(

@@ -495,21 +498,21 @@ class AssetLoader(LoaderPlugin):
        #     loader=self.__class__.__name__,
        # )

        # asset = context["asset"]["name"]
        # subset = context["subset"]["name"]
        # folder_name = context["folder"]["name"]
        # product_name = context["product"]["name"]
        # instance_name = prepare_scene_name(
        #     asset, subset, unique_number
        #     folder_name, product_name, unique_number
        # ) + '_CON'

        # return self._get_instance_collection(instance_name, nodes)

    def exec_update(self, container: Dict, representation: Dict):
    def exec_update(self, container: Dict, context: Dict):
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def update(self, container: Dict, representation: Dict):
    def update(self, container: Dict, context: Dict):
        """ Run the update on Blender main thread"""
        mti = MainThreadItem(self.exec_update, container, representation)
        mti = MainThreadItem(self.exec_update, container, context)
        execute_in_main_thread(mti)

    def exec_remove(self, container: Dict) -> bool:

@@ -47,6 +47,22 @@ def get_multilayer(settings):
                     ["multilayer_exr"])


def get_renderer(settings):
    """Get renderer from blender settings."""

    return (settings["blender"]
                    ["RenderSettings"]
                    ["renderer"])


def get_compositing(settings):
    """Get compositing from blender settings."""

    return (settings["blender"]
                    ["RenderSettings"]
                    ["compositing"])


def get_render_product(output_path, name, aov_sep):
    """
    Generate the path to the render product. Blender interprets the `#`
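Blender expands a run of '#' characters in an output path to a zero-padded frame number; a small sketch of that convention with an assumed file name.

frame = 17
padding = "####"
# Each "#" stands for one digit of the frame number.
print("renderMain." + str(frame).zfill(len(padding)) + ".exr")
# -> renderMain.0017.exr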
@@ -91,66 +107,120 @@ def set_render_format(ext, multilayer):
        image_settings.file_format = "TIFF"


def set_render_passes(settings):
    aov_list = (settings["blender"]
                        ["RenderSettings"]
                        ["aov_list"])

    custom_passes = (settings["blender"]
                             ["RenderSettings"]
                             ["custom_passes"])
def set_render_passes(settings, renderer):
    aov_list = set(settings["blender"]["RenderSettings"]["aov_list"])
    custom_passes = settings["blender"]["RenderSettings"]["custom_passes"]

    # Common passes for both renderers
    vl = bpy.context.view_layer

    # Data Passes
    vl.use_pass_combined = "combined" in aov_list
    vl.use_pass_z = "z" in aov_list
    vl.use_pass_mist = "mist" in aov_list
    vl.use_pass_normal = "normal" in aov_list

    # Light Passes
    vl.use_pass_diffuse_direct = "diffuse_light" in aov_list
    vl.use_pass_diffuse_color = "diffuse_color" in aov_list
    vl.use_pass_glossy_direct = "specular_light" in aov_list
    vl.use_pass_glossy_color = "specular_color" in aov_list
    vl.eevee.use_pass_volume_direct = "volume_light" in aov_list
    vl.use_pass_emit = "emission" in aov_list
    vl.use_pass_environment = "environment" in aov_list
    vl.use_pass_shadow = "shadow" in aov_list
    vl.use_pass_ambient_occlusion = "ao" in aov_list

    cycles = vl.cycles
    # Cryptomatte Passes
    vl.use_pass_cryptomatte_object = "cryptomatte_object" in aov_list
    vl.use_pass_cryptomatte_material = "cryptomatte_material" in aov_list
    vl.use_pass_cryptomatte_asset = "cryptomatte_asset" in aov_list

    cycles.denoising_store_passes = "denoising" in aov_list
    cycles.use_pass_volume_direct = "volume_direct" in aov_list
    cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
    if renderer == "BLENDER_EEVEE":
        # Eevee exclusive passes
        eevee = vl.eevee

        # Light Passes
        vl.use_pass_shadow = "shadow" in aov_list
        eevee.use_pass_volume_direct = "volume_light" in aov_list

        # Effects Passes
        eevee.use_pass_bloom = "bloom" in aov_list
        eevee.use_pass_transparent = "transparent" in aov_list

        # Cryptomatte Passes
        vl.use_pass_cryptomatte_accurate = "cryptomatte_accurate" in aov_list
    elif renderer == "CYCLES":
        # Cycles exclusive passes
        cycles = vl.cycles

        # Data Passes
        vl.use_pass_position = "position" in aov_list
        vl.use_pass_vector = "vector" in aov_list
        vl.use_pass_uv = "uv" in aov_list
        cycles.denoising_store_passes = "denoising" in aov_list
        vl.use_pass_object_index = "object_index" in aov_list
        vl.use_pass_material_index = "material_index" in aov_list
        cycles.pass_debug_sample_count = "sample_count" in aov_list

        # Light Passes
        vl.use_pass_diffuse_indirect = "diffuse_indirect" in aov_list
        vl.use_pass_glossy_indirect = "specular_indirect" in aov_list
        vl.use_pass_transmission_direct = "transmission_direct" in aov_list
        vl.use_pass_transmission_indirect = "transmission_indirect" in aov_list
        vl.use_pass_transmission_color = "transmission_color" in aov_list
        cycles.use_pass_volume_direct = "volume_light" in aov_list
        cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
        cycles.use_pass_shadow_catcher = "shadow" in aov_list

    aovs_names = [aov.name for aov in vl.aovs]
    for cp in custom_passes:
        cp_name = cp[0]
        cp_name = cp["attribute"]
        if cp_name not in aovs_names:
            aov = vl.aovs.add()
            aov.name = cp_name
        else:
            aov = vl.aovs[cp_name]
        aov.type = cp[1].get("type", "VALUE")
        aov.type = cp["value"]

    return aov_list, custom_passes
    return list(aov_list), custom_passes


def set_node_tree(output_path, name, aov_sep, ext, multilayer):
def _create_aov_slot(name, aov_sep, slots, rpass_name, multi_exr, output_path):
    filename = f"{name}{aov_sep}{rpass_name}.####"
    slot = slots.new(rpass_name if multi_exr else filename)
    filepath = str(output_path / filename.lstrip("/"))

    return slot, filepath


def set_node_tree(
    output_path, render_product, name, aov_sep, ext, multilayer, compositing
):
    # Set the scene to use the compositor node tree to render
    bpy.context.scene.use_nodes = True

    tree = bpy.context.scene.node_tree

    # Get the Render Layers node
    rl_node = None
    comp_layer_type = "CompositorNodeRLayers"
    output_type = "CompositorNodeOutputFile"
    compositor_type = "CompositorNodeComposite"

    # Get the Render Layer, Composite and the previous output nodes
    render_layer_node = None
    composite_node = None
    old_output_node = None
    for node in tree.nodes:
        if node.bl_idname == "CompositorNodeRLayers":
            rl_node = node
        if node.bl_idname == comp_layer_type:
            render_layer_node = node
        elif node.bl_idname == compositor_type:
            composite_node = node
        elif node.bl_idname == output_type and "AYON" in node.name:
            old_output_node = node
        if render_layer_node and composite_node and old_output_node:
            break

    # If there's not a Render Layers node, we create it
    if not rl_node:
        rl_node = tree.nodes.new("CompositorNodeRLayers")
    if not render_layer_node:
        render_layer_node = tree.nodes.new(comp_layer_type)

    # Get the enabled output sockets, that are the active passes for the
    # render.
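For readers without the settings schema at hand, a rough sketch (values assumed) of the structure the reworked set_render_passes() consumes; note that custom_passes entries are now dicts with "attribute"/"value" keys instead of the old tuples.

# Assumed project settings shape; only keys read by the functions above.
settings = {
    "blender": {
        "RenderSettings": {
            "renderer": "CYCLES",  # or "BLENDER_EEVEE"
            "aov_list": ["combined", "z", "denoising"],
            "custom_passes": [
                {"attribute": "myAOV", "value": "VALUE"},
            ],
        }
    }
}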
@@ -158,48 +228,81 @@ def set_node_tree(output_path, name, aov_sep, ext, multilayer):
    exclude_sockets = ["Image", "Alpha", "Noisy Image"]
    passes = [
        socket
        for socket in rl_node.outputs
        for socket in render_layer_node.outputs
        if socket.enabled and socket.name not in exclude_sockets
    ]

    # Remove all output nodes
    for node in tree.nodes:
        if node.bl_idname == "CompositorNodeOutputFile":
            tree.nodes.remove(node)

    # Create a new output node
    output = tree.nodes.new("CompositorNodeOutputFile")
    output = tree.nodes.new(output_type)

    image_settings = bpy.context.scene.render.image_settings
    output.format.file_format = image_settings.file_format

    slots = None

    # In case of a multilayer exr, we don't need to use the output node,
    # because the blender render already outputs a multilayer exr.
    if ext == "exr" and multilayer:
        output.layer_slots.clear()
        return []
    multi_exr = ext == "exr" and multilayer
    slots = output.layer_slots if multi_exr else output.file_slots
    output.base_path = render_product if multi_exr else str(output_path)

    output.file_slots.clear()
    output.base_path = str(output_path)
    slots.clear()

    aov_file_products = []

    old_links = {
        link.from_socket.name: link for link in tree.links
        if link.to_node == old_output_node}

    # Create a new socket for the beauty output
    pass_name = "rgba" if multi_exr else "beauty"
    slot, _ = _create_aov_slot(
        name, aov_sep, slots, pass_name, multi_exr, output_path)
    tree.links.new(render_layer_node.outputs["Image"], slot)

    if compositing:
        # Create a new socket for the composite output
        pass_name = "composite"
        comp_socket, filepath = _create_aov_slot(
            name, aov_sep, slots, pass_name, multi_exr, output_path)
        aov_file_products.append(("Composite", filepath))

    # For each active render pass, we add a new socket to the output node
    # and link it
    for render_pass in passes:
        filepath = f"{name}{aov_sep}{render_pass.name}.####"
    for rpass in passes:
        slot, filepath = _create_aov_slot(
            name, aov_sep, slots, rpass.name, multi_exr, output_path)
        aov_file_products.append((rpass.name, filepath))

        output.file_slots.new(filepath)
        # If the rpass was not connected with the old output node, we connect
        # it with the new one.
        if not old_links.get(rpass.name):
            tree.links.new(rpass, slot)

        filename = str(output_path / filepath.lstrip("/"))
    for link in list(old_links.values()):
        # Check if the socket is still available in the new output node.
        socket = output.inputs.get(link.to_socket.name)
        # If it is, we connect it with the new output node.
        if socket:
            tree.links.new(link.from_socket, socket)
        # Then, we remove the old link.
        tree.links.remove(link)

        aov_file_products.append((render_pass.name, filename))
    # If there's a composite node, we connect its input with the new output
    if compositing and composite_node:
        for link in tree.links:
            if link.to_node == composite_node:
                tree.links.new(link.from_socket, comp_socket)
                break

        node_input = output.inputs[-1]
    if old_output_node:
        output.location = old_output_node.location
        tree.nodes.remove(old_output_node)

        tree.links.new(render_pass, node_input)
    output.name = "AYON File Output"
    output.label = "AYON File Output"

    return aov_file_products
    return [] if multi_exr else aov_file_products


def imprint_render_settings(node, data):
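A minimal sketch of the slot and file naming _create_aov_slot() produces for a non-multilayer output; the sample values are assumptions.

from pathlib import Path

name, aov_sep, rpass_name = "renderMain", "_", "beauty"
filename = f"{name}{aov_sep}{rpass_name}.####"  # renderMain_beauty.####
filepath = str(Path("/proj/out") / filename.lstrip("/"))
# With multi_exr=True the slot would instead be named just "beauty",
# since the multilayer EXR carries the passes as layers.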
@@ -228,17 +331,23 @@ def prepare_rendering(asset_group):
    aov_sep = get_aov_separator(settings)
    ext = get_image_format(settings)
    multilayer = get_multilayer(settings)
    renderer = get_renderer(settings)
    compositing = get_compositing(settings)

    set_render_format(ext, multilayer)
    aov_list, custom_passes = set_render_passes(settings)
    bpy.context.scene.render.engine = renderer
    aov_list, custom_passes = set_render_passes(settings, renderer)

    output_path = Path.joinpath(dirpath, render_folder, file_name)

    render_product = get_render_product(output_path, name, aov_sep)
    aov_file_product = set_node_tree(
        output_path, name, aov_sep, ext, multilayer)
        output_path, render_product, name, aov_sep,
        ext, multilayer, compositing)

    bpy.context.scene.render.filepath = render_product
    # Clear the render filepath, so that the output is handled only by the
    # output node in the compositor.
    bpy.context.scene.render.filepath = ""

    render_settings = {
        "render_folder": render_folder,

@@ -82,7 +82,7 @@ def file_extensions() -> List[str]:
def work_root(session: dict) -> str:
    """Return the default root to browse for work files."""

    work_dir = session["AVALON_WORKDIR"]
    work_dir = session["AYON_WORKDIR"]
    scene_dir = session.get("AVALON_SCENEDIR")
    if scene_dir:
        return str(Path(work_dir, scene_dir))

@@ -1,24 +1,24 @@
# -*- coding: utf-8 -*-
"""Converter for legacy Houdini subsets."""
from ayon_core.pipeline.create.creator_plugins import SubsetConvertorPlugin
"""Converter for legacy Houdini products."""
from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin
from ayon_core.hosts.blender.api.lib import imprint


class BlenderLegacyConvertor(SubsetConvertorPlugin):
    """Find and convert any legacy subsets in the scene.
class BlenderLegacyConvertor(ProductConvertorPlugin):
    """Find and convert any legacy products in the scene.

    This Converter will find all legacy subsets in the scene and will
    transform them to the current system. Since the old subsets doesn't
    This Converter will find all legacy products in the scene and will
    transform them to the current system. Since the old products don't
    retain any information about their original creators, the only mapping
    we can do is based on their families.
    we can do is based on their product types.

    Its limitation is that you can have multiple creators creating subset
    of the same family and there is no way to handle it. This code should
    nevertheless cover all creators that came with OpenPype.
    Its limitation is that you can have multiple creators creating products
    of the same product type and there is no way to handle it. This code
    should nevertheless cover all creators that came with OpenPype.

    """
    identifier = "io.openpype.creators.blender.legacy"
    family_to_id = {
    product_type_to_id = {
        "action": "io.openpype.creators.blender.action",
        "camera": "io.openpype.creators.blender.camera",
        "animation": "io.openpype.creators.blender.animation",
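Conceptually the conversion reduces to a dictionary lookup; a sketch with an assumed legacy node.

product_type_to_id = {
    "camera": "io.openpype.creators.blender.camera",
}
legacy_product_type = "camera"
# The resolved identifier is imprinted on the legacy node so that the
# matching new-style creator picks it up on the next collection pass.
creator_identifier = product_type_to_id[legacy_product_type]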
@ -33,42 +33,42 @@ class BlenderLegacyConvertor(SubsetConvertorPlugin):
|
|||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BlenderLegacyConvertor, self).__init__(*args, **kwargs)
|
||||
self.legacy_subsets = {}
|
||||
self.legacy_instances = {}
|
||||
|
||||
def find_instances(self):
|
||||
"""Find legacy subsets in the scene.
|
||||
"""Find legacy products in the scene.
|
||||
|
||||
Legacy subsets are the ones that doesn't have `creator_identifier`
|
||||
Legacy products are the ones that doesn't have `creator_identifier`
|
||||
parameter on them.
|
||||
|
||||
This is using cached entries done in
|
||||
:py:meth:`~BaseCreator.cache_subsets()`
|
||||
:py:meth:`~BaseCreator.cache_instance_data()`
|
||||
|
||||
"""
|
||||
self.legacy_subsets = self.collection_shared_data.get(
|
||||
"blender_cached_legacy_subsets")
|
||||
if not self.legacy_subsets:
|
||||
self.legacy_instances = self.collection_shared_data.get(
|
||||
"blender_cached_legacy_instances")
|
||||
if not self.legacy_instances:
|
||||
return
|
||||
self.add_convertor_item(
|
||||
"Found {} incompatible subset{}".format(
|
||||
len(self.legacy_subsets),
|
||||
"s" if len(self.legacy_subsets) > 1 else ""
|
||||
"Found {} incompatible product{}".format(
|
||||
len(self.legacy_instances),
|
||||
"s" if len(self.legacy_instances) > 1 else ""
|
||||
)
|
||||
)
|
||||
|
||||
def convert(self):
|
||||
"""Convert all legacy subsets to current.
|
||||
"""Convert all legacy products to current.
|
||||
|
||||
It is enough to add `creator_identifier` and `instance_node`.
|
||||
|
||||
"""
|
||||
if not self.legacy_subsets:
|
||||
if not self.legacy_instances:
|
||||
return
|
||||
|
||||
for family, instance_nodes in self.legacy_subsets.items():
|
||||
if family in self.family_to_id:
|
||||
for product_type, instance_nodes in self.legacy_instances.items():
|
||||
if product_type in self.product_type_to_id:
|
||||
for instance_node in instance_nodes:
|
||||
creator_identifier = self.family_to_id[family]
|
||||
creator_identifier = self.product_type_to_id[product_type]
|
||||
self.log.info(
|
||||
"Converting {} to {}".format(instance_node.name,
|
||||
creator_identifier)
|
||||
|
|
|
|||
|
|
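In practice the conversion boils down to looking up a creator identifier by the instance's old product type and stamping it onto the node. A minimal sketch of that idea, with `instance_node` standing in for a Blender collection carrying the legacy metadata (the names below are illustrative, not the exact ayon-core helpers):

# Minimal sketch: map a legacy product type to a creator identifier and
# tag the node so the new creator system recognizes it. Hypothetical data.
product_type_to_id = {
    "action": "io.openpype.creators.blender.action",
    "camera": "io.openpype.creators.blender.camera",
}

def convert_legacy(instance_node, product_type):
    creator_identifier = product_type_to_id.get(product_type)
    if creator_identifier is None:
        return False  # no creator known for this product type
    # Stamping the identifier is what marks the instance as converted.
    instance_node["creator_identifier"] = creator_identifier
    return True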

@@ -10,19 +10,21 @@ class CreateAction(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.action"
     label = "Action"
-    family = "action"
+    product_type = "action"
     icon = "male"

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
         # Run parent create method
         collection = super().create(
-            subset_name, instance_data, pre_create_data
+            product_name, instance_data, pre_create_data
         )

         # Get instance name
-        name = plugin.prepare_scene_name(instance_data["asset"], subset_name)
+        name = plugin.prepare_scene_name(
+            instance_data["folderPath"], product_name
+        )

         if pre_create_data.get("use_selection"):
             for obj in lib.get_selection():

@@ -8,15 +8,15 @@ class CreateAnimation(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.animation"
     label = "Animation"
-    family = "animation"
+    product_type = "animation"
     icon = "male"

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
         # Run parent create method
         collection = super().create(
-            subset_name, instance_data, pre_create_data
+            product_name, instance_data, pre_create_data
         )

         if pre_create_data.get("use_selection"):

@@ -10,16 +10,16 @@ class CreateBlendScene(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.blendscene"
     label = "Blender Scene"
-    family = "blendScene"
+    product_type = "blendScene"
     icon = "cubes"

     maintain_selection = False

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):

-        instance_node = super().create(subset_name,
+        instance_node = super().create(product_name,
                                        instance_data,
                                        pre_create_data)

@@ -11,16 +11,16 @@ class CreateCamera(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.camera"
     label = "Camera"
-    family = "camera"
+    product_type = "camera"
     icon = "video-camera"

     create_as_asset_group = True

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):

-        asset_group = super().create(subset_name,
+        asset_group = super().create(product_name,
                                      instance_data,
                                      pre_create_data)

@@ -30,8 +30,8 @@ class CreateCamera(plugin.BaseCreator):
                 obj.parent = asset_group
         else:
             plugin.deselect_all()
-            camera = bpy.data.cameras.new(subset_name)
-            camera_obj = bpy.data.objects.new(subset_name, camera)
+            camera = bpy.data.cameras.new(product_name)
+            camera_obj = bpy.data.objects.new(product_name, camera)

             instances = bpy.data.collections.get(AVALON_INSTANCES)
             instances.objects.link(camera_obj)

@@ -10,16 +10,16 @@ class CreateLayout(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.layout"
     label = "Layout"
-    family = "layout"
+    product_type = "layout"
     icon = "cubes"

     create_as_asset_group = True

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):

-        asset_group = super().create(subset_name,
+        asset_group = super().create(product_name,
                                      instance_data,
                                      pre_create_data)

@@ -10,15 +10,15 @@ class CreateModel(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.model"
     label = "Model"
-    family = "model"
+    product_type = "model"
     icon = "cube"

     create_as_asset_group = True

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
-        asset_group = super().create(subset_name,
+        asset_group = super().create(product_name,
                                      instance_data,
                                      pre_create_data)

@@ -8,15 +8,15 @@ class CreatePointcache(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.pointcache"
     label = "Point Cache"
-    family = "pointcache"
+    product_type = "pointcache"
     icon = "gears"

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
         # Run parent create method
         collection = super().create(
-            subset_name, instance_data, pre_create_data
+            product_name, instance_data, pre_create_data
         )

         if pre_create_data.get("use_selection"):

@@ -1,8 +1,10 @@
 """Create render."""
 import bpy

+from ayon_core.lib import version_up
 from ayon_core.hosts.blender.api import plugin
 from ayon_core.hosts.blender.api.render_lib import prepare_rendering
+from ayon_core.hosts.blender.api.workio import save_file


 class CreateRenderlayer(plugin.BaseCreator):

@@ -10,16 +12,16 @@ class CreateRenderlayer(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.render"
     label = "Render"
-    family = "render"
+    product_type = "render"
     icon = "eye"

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
         try:
             # Run parent create method
             collection = super().create(
-                subset_name, instance_data, pre_create_data
+                product_name, instance_data, pre_create_data
             )

             prepare_rendering(collection)

@@ -37,6 +39,7 @@ class CreateRenderlayer(plugin.BaseCreator):
         # settings. Even the validator to check that the file is saved will
         # detect the file as saved, even if it isn't. The only solution for
         # now it is to force the file to be saved.
-        bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
+        filepath = version_up(bpy.data.filepath)
+        save_file(filepath, copy=False)

         return collection
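The behavioral change here is that the render creator no longer overwrites the current file in place; it saves a versioned-up copy instead. Roughly, `version_up` bumps the trailing version token in the file name. A simplified stand-in for the helper (not the actual `ayon_core.lib.version_up` implementation, which handles more naming schemes):

import re

def version_up(filepath):
    """Return filepath with its trailing _vNNN token incremented.

    Simplified stand-in for ayon_core.lib.version_up, assuming a
    single "_v" + digits token in the name.
    """
    match = re.search(r"_v(\d+)", filepath)
    if not match:
        return filepath
    padding = len(match.group(1))
    new_version = int(match.group(1)) + 1
    return (
        filepath[:match.start()]
        + "_v{:0{}d}".format(new_version, padding)
        + filepath[match.end():]
    )

print(version_up("/work/shot010_v003.blend"))  # /work/shot010_v004.blend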

@@ -8,15 +8,15 @@ class CreateReview(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.review"
     label = "Review"
-    family = "review"
+    product_type = "review"
     icon = "video-camera"

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
         # Run parent create method
         collection = super().create(
-            subset_name, instance_data, pre_create_data
+            product_name, instance_data, pre_create_data
         )

         if pre_create_data.get("use_selection"):

@@ -10,15 +10,15 @@ class CreateRig(plugin.BaseCreator):

     identifier = "io.openpype.creators.blender.rig"
     label = "Rig"
-    family = "rig"
+    product_type = "rig"
     icon = "wheelchair"

     create_as_asset_group = True

     def create(
-        self, subset_name: str, instance_data: dict, pre_create_data: dict
+        self, product_name: str, instance_data: dict, pre_create_data: dict
     ):
-        asset_group = super().create(subset_name,
+        asset_group = super().create(product_name,
                                      instance_data,
                                      pre_create_data)

@@ -1,7 +1,7 @@
 import bpy
+import ayon_api

 from ayon_core.pipeline import CreatedInstance, AutoCreator
-from ayon_core.client import get_asset_by_name
 from ayon_core.hosts.blender.api.plugin import BaseCreator
 from ayon_core.hosts.blender.api.pipeline import (
     AVALON_PROPERTY,

@@ -19,7 +19,7 @@ class CreateWorkfile(BaseCreator, AutoCreator):
     """
     identifier = "io.openpype.creators.blender.workfile"
     label = "Workfile"
-    family = "workfile"
+    product_type = "workfile"
     icon = "fa5.file"

     def create(self):

@@ -33,53 +33,71 @@ class CreateWorkfile(BaseCreator, AutoCreator):
         )

         project_name = self.project_name
-        asset_name = self.create_context.get_current_asset_name()
+        folder_path = self.create_context.get_current_folder_path()
         task_name = self.create_context.get_current_task_name()
         host_name = self.create_context.host_name

-        existing_asset_name = None
+        existing_folder_path = None
         if workfile_instance is not None:
-            existing_asset_name = workfile_instance.get("folderPath")
+            existing_folder_path = workfile_instance.get("folderPath")

         if not workfile_instance:
-            asset_doc = get_asset_by_name(project_name, asset_name)
-            subset_name = self.get_subset_name(
-                task_name, task_name, asset_doc, project_name, host_name
+            folder_entity = ayon_api.get_folder_by_path(
+                project_name, folder_path
+            )
+            task_entity = ayon_api.get_task_by_name(
+                project_name, folder_entity["id"], task_name
+            )
+            product_name = self.get_product_name(
+                project_name,
+                folder_entity,
+                task_entity,
+                task_name,
+                host_name,
             )
             data = {
-                "folderPath": asset_name,
+                "folderPath": folder_path,
                 "task": task_name,
                 "variant": task_name,
             }
             data.update(
                 self.get_dynamic_data(
-                    task_name,
-                    task_name,
-                    asset_doc,
                     project_name,
+                    folder_entity,
+                    task_entity,
+                    task_name,
                     host_name,
                     workfile_instance,
                 )
             )
             self.log.info("Auto-creating workfile instance...")
             workfile_instance = CreatedInstance(
-                self.family, subset_name, data, self
+                self.product_type, product_name, data, self
             )
             self._add_instance_to_context(workfile_instance)

         elif (
-            existing_asset_name != asset_name
+            existing_folder_path != folder_path
             or workfile_instance["task"] != task_name
         ):
             # Update instance context if it's different
-            asset_doc = get_asset_by_name(project_name, asset_name)
-            subset_name = self.get_subset_name(
-                task_name, task_name, asset_doc, project_name, host_name
+            folder_entity = ayon_api.get_folder_by_path(
+                project_name, folder_path
+            )
+            task_entity = ayon_api.get_task_by_name(
+                project_name, folder_entity["id"], task_name
+            )
+            product_name = self.get_product_name(
+                project_name,
+                folder_entity,
+                task_entity,
+                self.default_variant,
+                host_name,
             )

-            workfile_instance["folderPath"] = asset_name
+            workfile_instance["folderPath"] = folder_path
             workfile_instance["task"] = task_name
-            workfile_instance["subset"] = subset_name
+            workfile_instance["productName"] = product_name

         instance_node = bpy.data.collections.get(AVALON_CONTAINERS)
         if not instance_node:
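The pattern that replaces `get_asset_by_name` throughout this commit is a two-step lookup through `ayon_api`: resolve the folder entity by its path, then resolve the task by name under that folder's id. A minimal sketch, assuming a configured `ayon_api` connection and an existing project (the values below are illustrative):

import ayon_api

project_name = "demo_project"            # illustrative values
folder_path = "/assets/characters/hero"
task_name = "modeling"

# Folder entities are addressed by their full path in AYON.
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)

# Tasks are looked up by name under a specific folder id.
task_entity = ayon_api.get_task_by_name(
    project_name, folder_entity["id"], task_name
)
print(task_entity["name"])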

@@ -4,10 +4,10 @@ from ayon_core.hosts.blender.api import plugin


 def append_workfile(context, fname, do_import):
-    asset = context['asset']['name']
-    subset = context['subset']['name']
+    folder_name = context["folder"]["name"]
+    product_name = context["product"]["name"]

-    group_name = plugin.prepare_scene_name(asset, subset)
+    group_name = plugin.prepare_scene_name(folder_name, product_name)

     # We need to preserve the original names of the scenes, otherwise,
     # if there are duplicate names in the current workfile, the imported
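The same renaming recipe repeats across every loader below: build a scene name from folder and product, pick a unique number, and derive the namespace from it. A toy stand-in that mirrors the convention (the real helpers live in `ayon_core.hosts.blender.api.plugin` and may format differently):

def prepare_scene_name(folder_name, product_name, suffix=None):
    """Toy stand-in mirroring the naming convention used by the loaders."""
    name = f"{folder_name}_{product_name}"
    if suffix:
        name = f"{name}_{suffix}"
    return name

folder_name, product_name = "hero", "modelMain"  # illustrative
unique_number = "01"
asset_name = prepare_scene_name(folder_name, product_name)
group_name = prepare_scene_name(folder_name, product_name, unique_number)
namespace = f"{folder_name}_{unique_number}"
print(asset_name, group_name, namespace)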

@@ -134,13 +134,15 @@ class CacheModelLoader(plugin.AssetLoader):
         """

         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         containers = bpy.data.collections.get(AVALON_CONTAINERS)
         if not containers:

@@ -159,24 +161,25 @@ class CacheModelLoader(plugin.AssetLoader):

         self._link_objects(objects, asset_group, containers, asset_group)

+        product_type = context["product"]["productType"]
         asset_group[AVALON_PROPERTY] = {
             "schema": "openpype:container-2.0",
             "id": AVALON_CONTAINER_ID,
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": product_type,
             "objectName": group_name
         }

         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -188,15 +191,16 @@ class CacheModelLoader(plugin.AssetLoader):
         Warning:
             No nested collections are supported at the moment!
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -241,7 +245,7 @@ class CacheModelLoader(plugin.AssetLoader):
         asset_group.matrix_basis = mat

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
+        metadata["representation"] = repre_entity["id"]

     def exec_remove(self, container: Dict) -> bool:
         """Remove an existing container from a Blender scene.
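The container metadata rewrite is the same in every loader: the MongoDB-era stringified `_id`/`parent` fields give way to AYON's `id` and `versionId`, and `family` becomes `productType`. A condensed before/after of the stored dict, with placeholder values:

# Before (OpenPype / MongoDB ids, stringified ObjectId values; placeholders):
legacy_container = {
    "representation": "5f3e...abc",   # str(repre["_id"])
    "parent": "5f3e...def",           # str(repre["parent"])
    "family": "model",
}

# After (AYON entity ids, read straight off the representation entity):
ayon_container = {
    "representation": "0a1b2c3d4e5f",  # repre_entity["id"]
    "parent": "f6e5d4c3b2a1",          # repre_entity["versionId"]
    "productType": "model",
}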

@@ -44,11 +44,11 @@ class BlendActionLoader(plugin.AssetLoader):
         """

         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
-        lib_container = plugin.prepare_scene_name(asset, subset)
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]
+        lib_container = plugin.prepare_scene_name(folder_name, product_name)
         container_name = plugin.prepare_scene_name(
-            asset, subset, namespace
+            folder_name, product_name, namespace
         )

         container = bpy.data.collections.new(lib_container)

@@ -114,7 +114,7 @@ class BlendActionLoader(plugin.AssetLoader):
         self[:] = nodes
         return nodes

-    def update(self, container: Dict, representation: Dict):
+    def update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -126,18 +126,18 @@ class BlendActionLoader(plugin.AssetLoader):
         Warning:
             No nested collections are supported at the moment!
         """
-
+        repre_entity = context["representation"]
         collection = bpy.data.collections.get(
             container["objectName"]
         )

-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         logger.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert collection, (

@@ -241,7 +241,7 @@ class BlendActionLoader(plugin.AssetLoader):
         # Save the list of objects in the metadata container
         collection_metadata["objects"] = objects_list
         collection_metadata["libpath"] = str(libpath)
-        collection_metadata["representation"] = str(representation["_id"])
+        collection_metadata["representation"] = repre_entity["id"]

         bpy.ops.object.select_all(action='DESELECT')

@@ -39,13 +39,15 @@ class AudioLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -81,11 +83,11 @@ class AudioLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name,
             "audio": audio
         }

@@ -94,7 +96,7 @@ class AudioLoader(plugin.AssetLoader):
         self[:] = objects
         return [objects]

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update an audio strip in the sequence editor.

         Arguments:

@@ -103,14 +105,15 @@ class AudioLoader(plugin.AssetLoader):
             representation (openpype:representation-1.0): Representation to
                 update, from `host.ls()`.
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -173,8 +176,8 @@ class AudioLoader(plugin.AssetLoader):
         window_manager.windows[-1].screen.areas[0].type = old_type

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
-        metadata["parent"] = str(representation["parent"])
+        metadata["representation"] = repre_entity["id"]
+        metadata["parent"] = repre_entity["versionId"]
         metadata["audio"] = new_audio

     def exec_remove(self, container: Dict) -> bool:

@@ -127,20 +127,22 @@ class BlendLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

         try:
-            family = context["representation"]["context"]["family"]
+            product_type = context["product"]["productType"]
         except ValueError:
-            family = "model"
+            product_type = "model"

-        representation = str(context["representation"]["_id"])
+        representation = context["representation"]["id"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -149,8 +151,8 @@ class BlendLoader(plugin.AssetLoader):

         container, members = self._process_data(libpath, group_name)

-        if family == "layout":
-            self._post_process_layout(container, asset, representation)
+        if product_type == "layout":
+            self._post_process_layout(container, folder_name, representation)

         avalon_container.objects.link(container)

@@ -160,11 +162,11 @@ class BlendLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name,
             "members": members,
         }

@@ -179,13 +181,14 @@ class BlendLoader(plugin.AssetLoader):
         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """
         Update the loaded asset.
         """
+        repre_entity = context["representation"]
         group_name = container["objectName"]
         asset_group = bpy.data.objects.get(group_name)
-        libpath = Path(get_representation_path(representation)).as_posix()
+        libpath = Path(get_representation_path(repre_entity)).as_posix()

         assert asset_group, (
             f"The asset is not loaded: {container['objectName']}"

@@ -232,8 +235,8 @@ class BlendLoader(plugin.AssetLoader):

         new_data = {
             "libpath": libpath,
-            "representation": str(representation["_id"]),
-            "parent": str(representation["parent"]),
+            "representation": repre_entity["id"],
+            "parent": repre_entity["versionId"],
             "members": members,
         }

@@ -34,7 +34,7 @@ class BlendSceneLoader(plugin.AssetLoader):

         return None

-    def _process_data(self, libpath, group_name, family):
+    def _process_data(self, libpath, group_name, product_type):
         # Append all the data from the .blend file
         with bpy.data.libraries.load(
             libpath, link=False, relative=False

@@ -82,25 +82,29 @@ class BlendSceneLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

         try:
-            family = context["representation"]["context"]["family"]
+            product_type = context["product"]["productType"]
         except ValueError:
-            family = "model"
+            product_type = "model"

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:
             avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
             bpy.context.scene.collection.children.link(avalon_container)

-        container, members = self._process_data(libpath, group_name, family)
+        container, members = self._process_data(
+            libpath, group_name, product_type
+        )

         avalon_container.children.link(container)

@@ -110,11 +114,11 @@ class BlendSceneLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name,
             "members": members,
         }

@@ -129,13 +133,14 @@ class BlendSceneLoader(plugin.AssetLoader):
         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """
         Update the loaded asset.
         """
+        repre_entity = context["representation"]
         group_name = container["objectName"]
         asset_group = bpy.data.collections.get(group_name)
-        libpath = Path(get_representation_path(representation)).as_posix()
+        libpath = Path(get_representation_path(repre_entity)).as_posix()

         assert asset_group, (
             f"The asset is not loaded: {container['objectName']}"

@@ -167,8 +172,12 @@ class BlendSceneLoader(plugin.AssetLoader):

         self.exec_remove(container)

-        family = container["family"]
-        asset_group, members = self._process_data(libpath, group_name, family)
+        product_type = container.get("productType")
+        if product_type is None:
+            product_type = container["family"]
+        asset_group, members = self._process_data(
+            libpath, group_name, product_type
+        )

         for member in members:
             if member.name in collection_parents:

@@ -193,8 +202,8 @@ class BlendSceneLoader(plugin.AssetLoader):

         new_data = {
             "libpath": libpath,
-            "representation": str(representation["_id"]),
-            "parent": str(representation["parent"]),
+            "representation": repre_entity["id"],
+            "parent": repre_entity["versionId"],
             "members": members,
         }
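Because containers written by older builds still carry `family` instead of `productType`, the update paths read the new key first and fall back to the old one. A small sketch of that defensive read, useful in any code that touches container metadata:

def get_container_product_type(container):
    """Read productType with a fallback to the legacy 'family' key."""
    product_type = container.get("productType")
    if product_type is None:
        # Containers created before the rename only have 'family'.
        product_type = container.get("family")
    return product_type

print(get_container_product_type({"family": "layout"}))       # layout
print(get_container_product_type({"productType": "layout"}))  # layout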

@@ -84,13 +84,15 @@ class AbcCameraLoader(plugin.AssetLoader):

         libpath = self.filepath_from_context(context)

-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -117,18 +119,18 @@ class AbcCameraLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or "",
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name,
         }

         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -140,15 +142,16 @@ class AbcCameraLoader(plugin.AssetLoader):
         Warning:
             No nested collections are supported at the moment!
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -183,7 +186,7 @@ class AbcCameraLoader(plugin.AssetLoader):
         asset_group.matrix_basis = mat

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
+        metadata["representation"] = repre_entity["id"]

     def exec_remove(self, container: Dict) -> bool:
         """Remove an existing container from a Blender scene.

@@ -87,13 +87,15 @@ class FbxCameraLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -120,18 +122,18 @@ class FbxCameraLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name
         }

         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -143,15 +145,16 @@ class FbxCameraLoader(plugin.AssetLoader):
         Warning:
             No nested collections are supported at the moment!
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -193,7 +196,7 @@ class FbxCameraLoader(plugin.AssetLoader):
         asset_group.matrix_basis = mat

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
+        metadata["representation"] = repre_entity["id"]

     def exec_remove(self, container: Dict) -> bool:
         """Remove an existing container from a Blender scene.

@@ -131,13 +131,15 @@ class FbxModelLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -164,18 +166,18 @@ class FbxModelLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name
         }

         self[:] = objects
         return objects

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -187,15 +189,16 @@ class FbxModelLoader(plugin.AssetLoader):
         Warning:
             No nested collections are supported at the moment!
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -248,7 +251,7 @@ class FbxModelLoader(plugin.AssetLoader):
         asset_group.matrix_basis = mat

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
+        metadata["representation"] = repre_entity["id"]

     def exec_remove(self, container: Dict) -> bool:
         """Remove an existing container from a Blender scene.

@@ -50,11 +50,11 @@ class JsonLayoutLoader(plugin.AssetLoader):
         if anim_collection:
             bpy.data.collections.remove(anim_collection)

-    def _get_loader(self, loaders, family):
+    def _get_loader(self, loaders, product_type):
         name = ""
-        if family == 'rig':
+        if product_type == 'rig':
             name = "BlendRigLoader"
-        elif family == 'model':
+        elif product_type == 'model':
             name = "BlendModelLoader"

         if name == "":

@@ -76,10 +76,12 @@ class JsonLayoutLoader(plugin.AssetLoader):

         for element in data:
             reference = element.get('reference')
-            family = element.get('family')
+            product_type = element.get("product_type")
+            if product_type is None:
+                product_type = element.get("family")

             loaders = loaders_from_representation(all_loaders, reference)
-            loader = self._get_loader(loaders, family)
+            loader = self._get_loader(loaders, product_type)

             if not loader:
                 continue

@@ -95,7 +97,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
                 'parent': asset_group,
                 'transform': element.get('transform'),
                 'action': action,
-                'create_animation': True if family == 'rig' else False,
+                'create_animation': True if product_type == 'rig' else False,
                 'animation_asset': asset
             }

@@ -127,10 +129,10 @@ class JsonLayoutLoader(plugin.AssetLoader):
         # legacy_create(
         #     creator_plugin,
         #     name="camera",
-        #     # name=f"{unique_number}_{subset}_animation",
+        #     # name=f"{unique_number}_{product[name]}_animation",
        #     asset=asset,
        #     options={"useSelection": False}
-        #     # data={"dependencies": str(context["representation"]["_id"])}
+        #     # data={"dependencies": context["representation"]["id"]}
        # )

     def process_asset(self,

@@ -146,13 +148,15 @@ class JsonLayoutLoader(plugin.AssetLoader):
             options: Additional settings dictionary
         """
         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

-        asset_name = plugin.prepare_scene_name(asset, subset)
-        unique_number = plugin.get_unique_number(asset, subset)
-        group_name = plugin.prepare_scene_name(asset, subset, unique_number)
-        namespace = namespace or f"{asset}_{unique_number}"
+        asset_name = plugin.prepare_scene_name(folder_name, product_name)
+        unique_number = plugin.get_unique_number(folder_name, product_name)
+        group_name = plugin.prepare_scene_name(
+            folder_name, product_name, unique_number
+        )
+        namespace = namespace or f"{folder_name}_{unique_number}"

         avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
         if not avalon_container:

@@ -173,18 +177,18 @@ class JsonLayoutLoader(plugin.AssetLoader):
             "name": name,
             "namespace": namespace or '',
             "loader": str(self.__class__.__name__),
-            "representation": str(context["representation"]["_id"]),
+            "representation": context["representation"]["id"],
             "libpath": libpath,
             "asset_name": asset_name,
-            "parent": str(context["representation"]["parent"]),
-            "family": context["representation"]["context"]["family"],
+            "parent": context["representation"]["versionId"],
+            "productType": context["product"]["productType"],
             "objectName": group_name
         }

         self[:] = asset_group.children
         return asset_group.children

-    def exec_update(self, container: Dict, representation: Dict):
+    def exec_update(self, container: Dict, context: Dict):
         """Update the loaded asset.

         This will remove all objects of the current collection, load the new

@@ -193,15 +197,16 @@ class JsonLayoutLoader(plugin.AssetLoader):
         will not be removed, only unlinked. Normally this should not be the
         case though.
         """
+        repre_entity = context["representation"]
         object_name = container["objectName"]
         asset_group = bpy.data.objects.get(object_name)
-        libpath = Path(get_representation_path(representation))
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert asset_group, (

@@ -239,7 +244,10 @@ class JsonLayoutLoader(plugin.AssetLoader):

         for obj in asset_group.children:
             obj_meta = obj.get(AVALON_PROPERTY)
-            if obj_meta.get('family') == 'rig':
+            product_type = obj_meta.get("productType")
+            if product_type is None:
+                product_type = obj_meta.get("family")
+            if product_type == "rig":
                 rig = None
                 for child in obj.children:
                     if child.type == 'ARMATURE':

@@ -262,7 +270,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
         asset_group.matrix_basis = mat

         metadata["libpath"] = str(libpath)
-        metadata["representation"] = str(representation["_id"])
+        metadata["representation"] = repre_entity["id"]

     def exec_remove(self, container: Dict) -> bool:
         """Remove an existing container from a Blender scene.

@@ -93,18 +93,18 @@ class BlendLookLoader(plugin.AssetLoader):
         """

         libpath = self.filepath_from_context(context)
-        asset = context["asset"]["name"]
-        subset = context["subset"]["name"]
+        folder_name = context["folder"]["name"]
+        product_name = context["product"]["name"]

         lib_container = plugin.prepare_scene_name(
-            asset, subset
+            folder_name, product_name
         )
         unique_number = plugin.get_unique_number(
-            asset, subset
+            folder_name, product_name
         )
-        namespace = namespace or f"{asset}_{unique_number}"
+        namespace = namespace or f"{folder_name}_{unique_number}"
         container_name = plugin.prepare_scene_name(
-            asset, subset, unique_number
+            folder_name, product_name, unique_number
         )

         container = bpy.data.collections.new(lib_container)

@@ -130,23 +130,24 @@ class BlendLookLoader(plugin.AssetLoader):
         metadata["objects"] = objects
         metadata["materials"] = materials

-        metadata["parent"] = str(context["representation"]["parent"])
-        metadata["family"] = context["representation"]["context"]["family"]
+        metadata["parent"] = context["representation"]["versionId"]
+        metadata["product_type"] = context["product"]["productType"]

         nodes = list(container.objects)
         nodes.append(container)
         self[:] = nodes
         return nodes

-    def update(self, container: Dict, representation: Dict):
+    def update(self, container: Dict, context: Dict):
         collection = bpy.data.collections.get(container["objectName"])
-        libpath = Path(get_representation_path(representation))
+        repre_entity = context["representation"]
+        libpath = Path(get_representation_path(repre_entity))
         extension = libpath.suffix.lower()

         self.log.info(
             "Container: %s\nRepresentation: %s",
             pformat(container, indent=2),
-            pformat(representation, indent=2),
+            pformat(repre_entity, indent=2),
         )

         assert collection, (

@@ -201,7 +202,7 @@ class BlendLookLoader(plugin.AssetLoader):
         collection_metadata["objects"] = objects
         collection_metadata["materials"] = materials
         collection_metadata["libpath"] = str(libpath)
-        collection_metadata["representation"] = str(representation["_id"])
+        collection_metadata["representation"] = repre_entity["id"]

     def remove(self, container: Dict) -> bool:
         collection = bpy.data.collections.get(container["objectName"])

@@ -25,7 +25,7 @@ class CollectBlenderInstanceData(pyblish.api.InstancePlugin):
                 members.extend(instance_node.children)

             # Special case for animation instances, include armatures
-            if instance.data["family"] == "animation":
+            if instance.data["productType"] == "animation":
                 for obj in instance_node.objects:
                     if obj.type == 'EMPTY' and obj.get(AVALON_PROPERTY):
                         members.extend(

@@ -19,9 +19,9 @@ class ExtractABC(publish.Extractor, publish.OptionalPyblishPluginMixin):

         # Define extract output file path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.abc"
         filepath = os.path.join(stagingdir, filename)

@@ -23,9 +23,9 @@ class ExtractAnimationABC(

         # Define extract output file path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.abc"

         filepath = os.path.join(stagingdir, filename)

@@ -13,6 +13,9 @@ class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin):
     families = ["model", "camera", "rig", "action", "layout", "blendScene"]
     optional = True

+    # From settings
+    compress = False
+
     def process(self, instance):
         if not self.is_active(instance.data):
             return

@@ -20,9 +23,9 @@ class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin):
         # Define extract output file path

         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.blend"
         filepath = os.path.join(stagingdir, filename)

@@ -53,7 +56,7 @@ class ExtractBlend(publish.Extractor, publish.OptionalPyblishPluginMixin):
                     if node.image and node.image.packed_file is None:
                         node.image.pack()

-        bpy.data.libraries.write(filepath, data_blocks)
+        bpy.data.libraries.write(filepath, data_blocks, compress=self.compress)

         if "representations" not in instance.data:
             instance.data["representations"] = []
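The new `compress` class attribute is forwarded straight to Blender's library writer, so studios can toggle gzip-compressed `.blend` output from settings. The call itself is plain Blender Python API; `bpy.data.libraries.write` accepts a `compress` keyword. A small sketch that must run inside Blender, with an illustrative output path:

import bpy

# Collect the datablocks to export; here just the current scene's
# objects (illustrative selection).
data_blocks = set(bpy.context.scene.objects)

# compress=True writes a gzip-compressed .blend, trading write speed
# for smaller published files.
bpy.data.libraries.write(
    "/tmp/extract_example.blend", data_blocks, compress=True
)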

@@ -16,6 +16,9 @@ class ExtractBlendAnimation(
     families = ["animation"]
     optional = True

+    # From settings
+    compress = False
+
     def process(self, instance):
         if not self.is_active(instance.data):
             return

@@ -23,9 +26,9 @@ class ExtractBlendAnimation(
         # Define extract output file path

         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.blend"
         filepath = os.path.join(stagingdir, filename)

@@ -46,7 +49,7 @@ class ExtractBlendAnimation(
                         data_blocks.add(child.animation_data.action)
                     data_blocks.add(obj)

-        bpy.data.libraries.write(filepath, data_blocks)
+        bpy.data.libraries.write(filepath, data_blocks, compress=self.compress)

         if "representations" not in instance.data:
             instance.data["representations"] = []

@@ -21,9 +21,9 @@ class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin):

         # Define extract output file path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.abc"
         filepath = os.path.join(stagingdir, filename)

@@ -20,9 +20,9 @@ class ExtractCamera(publish.Extractor, publish.OptionalPyblishPluginMixin):

         # Define extract output file path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.fbx"
         filepath = os.path.join(stagingdir, filename)

@@ -21,9 +21,9 @@ class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin):

         # Define extract output file path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         filename = f"{instance_name}.fbx"
         filepath = os.path.join(stagingdir, filename)

@@ -145,9 +145,9 @@ class ExtractAnimationFBX(

         root.select_set(True)
         armature.select_set(True)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         fbx_filename = f"{instance_name}_{armature.name}.fbx"
         filepath = os.path.join(stagingdir, fbx_filename)
@ -5,7 +5,8 @@ import bpy
|
|||
import bpy_extras
|
||||
import bpy_extras.anim_utils
|
||||
|
||||
-from ayon_core.client import get_representation_by_name
+from ayon_api import get_representations

 from ayon_core.pipeline import publish
 from ayon_core.hosts.blender.api import plugin
 from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY
@@ -133,7 +134,9 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin):
         fbx_count = 0

-        project_name = instance.context.data["projectEntity"]["name"]
+        project_name = instance.context.data["projectName"]
+        version_ids = set()
+        filtered_assets = []
         for asset in asset_group.children:
             metadata = asset.get(AVALON_PROPERTY)
             if not metadata:
@@ -146,40 +149,48 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin):
                 )
                 continue

+            filtered_assets.append((asset, metadata))
+            version_ids.add(metadata["parent"])
+
+        repre_entities = get_representations(
+            project_name,
+            representation_names={"blend", "fbx", "abc"},
+            version_ids=version_ids,
+            fields={"id", "versionId", "name"}
+        )
+        repre_mapping_by_version_id = {
+            version_id: {}
+            for version_id in version_ids
+        }
+        for repre_entity in repre_entities:
+            version_id = repre_entity["versionId"]
+            repre_mapping_by_version_id[version_id][repre_entity["name"]] = (
+                repre_entity
+            )
+
+        for asset, metadata in filtered_assets:
+            version_id = metadata["parent"]
-            family = metadata["family"]
+            product_type = metadata.get("product_type")
+            if product_type is None:
+                product_type = metadata["family"]
+
+            repres_by_name = repre_mapping_by_version_id[version_id]

             self.log.debug("Parent: {}".format(version_id))
-            # Get blend reference
-            blend = get_representation_by_name(
-                project_name, "blend", version_id, fields=["_id"]
-            )
-            blend_id = None
-            if blend:
-                blend_id = blend["_id"]
-            # Get fbx reference
-            fbx = get_representation_by_name(
-                project_name, "fbx", version_id, fields=["_id"]
-            )
-            fbx_id = None
-            if fbx:
-                fbx_id = fbx["_id"]
-            # Get abc reference
-            abc = get_representation_by_name(
-                project_name, "abc", version_id, fields=["_id"]
-            )
-            abc_id = None
-            if abc:
-                abc_id = abc["_id"]
-
-            json_element = {}
-            if blend_id:
-                json_element["reference"] = str(blend_id)
-            if fbx_id:
-                json_element["reference_fbx"] = str(fbx_id)
-            if abc_id:
-                json_element["reference_abc"] = str(abc_id)
-            json_element["family"] = family
+            # Get blend, fbx and abc reference
+            blend_id = repres_by_name.get("blend", {}).get("id")
+            fbx_id = repres_by_name.get("fbx", {}).get("id")
+            abc_id = repres_by_name.get("abc", {}).get("id")
+            json_element = {
+                key: value
+                for key, value in (
+                    ("reference", blend_id),
+                    ("reference_fbx", fbx_id),
+                    ("reference_abc", abc_id),
+                )
+                if value
+            }
+            json_element["product_type"] = product_type
             json_element["instance_name"] = asset.name
             json_element["asset_name"] = metadata["asset_name"]
             json_element["file_path"] = metadata["libpath"]
@@ -215,7 +226,7 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin):
             ]

             # Extract the animation as well
-            if family == "rig":
+            if product_type == "rig":
                 f, n = self._export_animation(
                     asset, instance, stagingdir, fbx_count)
                 if f:
@@ -225,9 +236,9 @@ class ExtractLayout(publish.Extractor, publish.OptionalPyblishPluginMixin):

             json_data.append(json_element)

-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        instance_name = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        instance_name = f"{folder_name}_{product_name}"
         json_filename = f"{instance_name}.json"

         json_path = os.path.join(stagingdir, json_filename)
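The hunk above replaces three per-version `get_representation_by_name` round-trips with a single `ayon_api.get_representations` query. A minimal sketch of the same grouping pattern, using only the call shown in the diff; the project name and version ids are invented, and a configured AYON server connection is assumed:

```python
import ayon_api

# Hypothetical inputs; in the plugin these come from the publish instance.
project_name = "demo_project"
version_ids = {"ver-001", "ver-002"}

# One server query for all versions and all three representation names.
repre_entities = ayon_api.get_representations(
    project_name,
    representation_names={"blend", "fbx", "abc"},
    version_ids=version_ids,
    fields={"id", "versionId", "name"},
)

# Group representations by version id, then by representation name,
# mirroring 'repre_mapping_by_version_id' in the hunk above.
repres_by_version = {version_id: {} for version_id in version_ids}
for repre_entity in repre_entities:
    repres_by_version[repre_entity["versionId"]][repre_entity["name"]] = repre_entity

blend_id = repres_by_version["ver-001"].get("blend", {}).get("id")
```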
@@ -55,9 +55,9 @@ class ExtractPlayblast(publish.Extractor, publish.OptionalPyblishPluginMixin):

         # get output path
         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        filename = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        filename = f"{folder_name}_{product_name}"

         path = os.path.join(stagingdir, filename)
@@ -32,9 +32,9 @@ class ExtractThumbnail(publish.Extractor):
             return

         stagingdir = self.staging_dir(instance)
-        asset_name = instance.data["assetEntity"]["name"]
-        subset = instance.data["subset"]
-        filename = f"{asset_name}_{subset}"
+        folder_name = instance.data["folderEntity"]["name"]
+        product_name = instance.data["productName"]
+        filename = f"{folder_name}_{product_name}"

         path = os.path.join(stagingdir, filename)
@@ -42,11 +42,11 @@ class ExtractThumbnail(publish.Extractor):

         camera = instance.data.get("review_camera", "AUTO")
         start = instance.data.get("frameStart", bpy.context.scene.frame_start)
-        family = instance.data.get("family")
+        product_type = instance.data["productType"]
         isolate = instance.data.get("isolate", None)

         presets = json.loads(self.presets)
-        preset = presets.get(family, {})
+        preset = presets.get(product_type, {})

         preset.update({
             "camera": camera,
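The thumbnail presets are stored as a JSON string keyed by product type, with an empty preset as fallback. A small sketch of that lookup; the preset contents here are invented for illustration:

```python
import json

# Invented settings payload; in the plugin it comes from 'self.presets'.
presets_json = '{"model": {"resolution_percentage": 50}, "rig": {}}'

presets = json.loads(presets_json)
product_type = "model"
# Unknown product types fall back to an empty preset.
preset = presets.get(product_type, {})
preset.update({"camera": "AUTO"})
```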
@@ -28,25 +28,26 @@ class IntegrateAnimation(
         # Update the json file for the setdress to add the published
         # representations of the animations
         for json_dict in data:
+            json_product_name = json_dict["productName"]
             i = None
             for elem in instance.context:
-                if elem.data.get('subset') == json_dict['subset']:
+                if elem.data["productName"] == json_product_name:
                     i = elem
                     break
             if not i:
                 continue
             rep = None
-            pub_repr = i.data.get('published_representations')
+            pub_repr = i.data["published_representations"]
             for elem in pub_repr:
-                if pub_repr.get(elem).get('representation').get('name') == "fbx":
-                    rep = pub_repr.get(elem)
+                if pub_repr[elem]["representation"]["name"] == "fbx":
+                    rep = pub_repr[elem]
                     break
             if not rep:
                 continue
-            obj_id = rep.get('representation').get('_id')
+            obj_id = rep["representation"]["id"]

             if obj_id:
-                json_dict['_id'] = str(obj_id)
+                json_dict["representation_id"] = str(obj_id)

         with open(json_path, "w") as file:
             json.dump(data, fp=file, indent=2)
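For context, `published_representations` maps representation ids to info dicts that embed the representation entity. A standalone sketch of the lookup the loop above performs; the data is invented for illustration:

```python
# Invented example data; the real dict is filled during integration.
published_representations = {
    "repre-1": {"representation": {"id": "repre-1", "name": "abc"}},
    "repre-2": {"representation": {"id": "repre-2", "name": "fbx"}},
}

# Find the first published "fbx" representation, if any.
fbx_info = next(
    (
        info
        for info in published_representations.values()
        if info["representation"]["name"] == "fbx"
    ),
    None,
)
representation_id = fbx_info["representation"]["id"] if fbx_info else None
assert representation_id == "repre-2"
```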
@@ -28,15 +28,27 @@ class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
     def process(self, instance):
         if not self.is_active(instance.data):
             return

+        tree = bpy.context.scene.node_tree
+        output_type = "CompositorNodeOutputFile"
+        output_node = None
+        # Find the output node that includes "AYON" in its name.
+        # There should be only one.
+        for node in tree.nodes:
+            if node.bl_idname == output_type and "AYON" in node.name:
+                output_node = node
+                break
+        if not output_node:
+            raise PublishValidationError(
+                "No output node found in the compositor tree."
+            )
         filepath = bpy.data.filepath
         file = os.path.basename(filepath)
         filename, ext = os.path.splitext(file)
-        if filename not in bpy.context.scene.render.filepath:
+        if filename not in output_node.base_path:
             raise PublishValidationError(
-                "Render output folder "
-                "doesn't match the blender scene name! "
-                "Use Repair action to "
-                "fix the folder file path."
+                "Render output folder doesn't match the blender scene name! "
+                "Use Repair action to fix the folder file path."
             )

     @classmethod
@@ -26,6 +26,7 @@ class ValidateFileSaved(pyblish.api.ContextPlugin,
     hosts = ["blender"]
     label = "Validate File Saved"
     optional = False
+    # TODO rename to 'exclude_product_types'
     exclude_families = []
     actions = [SaveWorkfileAction]

@@ -41,8 +42,8 @@ class ValidateFileSaved(pyblish.api.ContextPlugin,

         # Do not validate workfile has unsaved changes if only instances
         # present of families that should be excluded
-        families = {
-            instance.data["family"] for instance in context
+        product_types = {
+            instance.data["productType"] for instance in context
             # Consider only enabled instances
             if instance.data.get("publish", True)
             and instance.data.get("active", True)
@@ -52,7 +53,7 @@ class ValidateFileSaved(pyblish.api.ContextPlugin,
             return any(family in exclude_family
                        for exclude_family in self.exclude_families)

-        if all(is_excluded(family) for family in families):
+        if all(is_excluded(product_type) for product_type in product_types):
            self.log.debug("Only excluded families found, skipping workfile "
                           "unsaved changes validation..")
            return
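The exclusion check above reduces to plain set logic: collect the product types of active instances and skip validation only when every one of them matches an exclusion entry. A self-contained sketch with invented data:

```python
exclude_families = ["workfile", "render"]

def is_excluded(product_type):
    # Mirrors the plugin: excluded when the type occurs in any entry.
    return any(
        product_type in exclude_family
        for exclude_family in exclude_families
    )

# Invented product types standing in for the pyblish context.
product_types = {"workfile", "renderMain"}
skip_validation = all(is_excluded(p) for p in product_types)
# "renderMain" is not a substring of any entry, so validation still runs.
assert skip_validation is False
```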
@@ -16,9 +16,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
     launch_types = {LaunchTypes.local}

     def execute(self):
-        asset_doc = self.data["asset_doc"]
-        width = asset_doc["data"]["resolutionWidth"]
-        height = asset_doc["data"]["resolutionHeight"]
+        folder_attributes = self.data["folder_entity"]["attrib"]
+        width = folder_attributes["resolutionWidth"]
+        height = folder_attributes["resolutionHeight"]

         # Add workfile path to launch arguments
         workfile_path = self.workfile_path()
@@ -1,8 +1,6 @@
 import os
 import pyblish.api

-from ayon_core.client import get_asset_name_identifier
-

 class CollectCelactionInstances(pyblish.api.ContextPlugin):
     """ Adds the celaction render instances """
@@ -16,24 +14,20 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
         staging_dir = os.path.dirname(current_file)
         scene_file = os.path.basename(current_file)
         version = context.data["version"]
-        asset_entity = context.data["assetEntity"]
-        project_entity = context.data["projectEntity"]
-
-        asset_name = get_asset_name_identifier(asset_entity)
+        folder_entity = context.data["folderEntity"]

+        folder_attributes = folder_entity["attrib"]

         shared_instance_data = {
-            "asset": asset_name,
-            "frameStart": asset_entity["data"]["frameStart"],
-            "frameEnd": asset_entity["data"]["frameEnd"],
-            "handleStart": asset_entity["data"]["handleStart"],
-            "handleEnd": asset_entity["data"]["handleEnd"],
-            "fps": asset_entity["data"]["fps"],
-            "resolutionWidth": asset_entity["data"].get(
-                "resolutionWidth",
-                project_entity["data"]["resolutionWidth"]),
-            "resolutionHeight": asset_entity["data"].get(
-                "resolutionHeight",
-                project_entity["data"]["resolutionHeight"]),
+            "folderPath": folder_entity["path"],
+            "frameStart": folder_attributes["frameStart"],
+            "frameEnd": folder_attributes["frameEnd"],
+            "handleStart": folder_attributes["handleStart"],
+            "handleEnd": folder_attributes["handleEnd"],
+            "fps": folder_attributes["fps"],
+            "resolutionWidth": folder_attributes["resolutionWidth"],
+            "resolutionHeight": folder_attributes["resolutionHeight"],
             "pixelAspect": 1,
             "step": 1,
             "version": version
@@ -46,17 +40,18 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
         shared_instance_data.update(celaction_kwargs)

         # workfile instance
-        family = "workfile"
-        subset = family + task.capitalize()
+        product_type = "workfile"
+        product_name = product_type + task.capitalize()
         # Create instance
-        instance = context.create_instance(subset)
+        instance = context.create_instance(product_name)

         # creating instance data
         instance.data.update({
-            "subset": subset,
             "label": scene_file,
-            "family": family,
-            "families": [],
+            "productName": product_name,
+            "productType": product_type,
+            "family": product_type,
+            "families": [product_type],
             "representations": []
         })
@@ -76,17 +71,19 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
         self.log.info('Publishing Celaction workfile')

         # render instance
-        subset = f"render{task}Main"
-        instance = context.create_instance(name=subset)
+        product_name = f"render{task}Main"
+        product_type = "render.farm"
+        instance = context.create_instance(name=product_name)
         # getting instance state
         instance.data["publish"] = True

-        # add assetEntity data into instance
+        # add folderEntity data into instance
         instance.data.update({
-            "label": "{} - farm".format(subset),
-            "family": "render.farm",
-            "families": [],
-            "subset": subset
+            "label": "{} - farm".format(product_name),
+            "productType": product_type,
+            "family": product_type,
+            "families": [product_type],
+            "productName": product_name
         })

         # adding basic script data
@@ -19,12 +19,14 @@ class CollectRenderPath(pyblish.api.InstancePlugin):
         anatomy = instance.context.data["anatomy"]
         anatomy_data = copy.deepcopy(instance.data["anatomyData"])
         padding = anatomy.templates.get("frame_padding", 4)
+        product_type = "render"
         anatomy_data.update({
             "frame": f"%0{padding}d",
-            "family": "render",
+            "family": product_type,
             "representation": self.output_extension,
             "ext": self.output_extension
         })
+        anatomy_data["product"]["type"] = product_type

         anatomy_filled = anatomy.format(anatomy_data)
@@ -73,7 +73,7 @@ def containerise(flame_clip_segment,
         "name": str(name),
         "namespace": str(namespace),
         "loader": str(loader),
-        "representation": str(context["representation"]["_id"]),
+        "representation": context["representation"]["id"],
     }

     if data:
@@ -147,8 +147,8 @@ def imprint(segment, data=None):
     Examples:
         data = {
             'asset': 'sq020sh0280',
-            'family': 'render',
-            'subset': 'subsetMain'
+            'productType': 'render',
+            'productName': 'productMain'
         }
     """
     data = data or {}
@@ -353,9 +353,9 @@ class PublishableClip:
     rename_default = False
     hierarchy_default = "{_folder_}/{_sequence_}/{_track_}"
     clip_name_default = "shot_{_trackIndex_:0>3}_{_clipIndex_:0>4}"
-    subset_name_default = "[ track name ]"
     review_track_default = "[ none ]"
-    subset_family_default = "plate"
+    base_product_name_default = "[ track name ]"
+    base_product_type_default = "plate"
     count_from_default = 10
     count_steps_default = 10
     vertical_sync_default = False
@@ -368,7 +368,7 @@ class PublishableClip:

     def __init__(self, segment, **kwargs):
         self.rename_index = kwargs["rename_index"]
-        self.family = kwargs["family"]
+        self.product_type = kwargs["family"]
         self.log = kwargs["log"]

         # get main parent objects
@@ -486,10 +486,10 @@ class PublishableClip:
             "countFrom", {}).get("value") or self.count_from_default
         self.count_steps = self.ui_inputs.get(
             "countSteps", {}).get("value") or self.count_steps_default
-        self.subset_name = self.ui_inputs.get(
-            "subsetName", {}).get("value") or self.subset_name_default
-        self.subset_family = self.ui_inputs.get(
-            "subsetFamily", {}).get("value") or self.subset_family_default
+        self.base_product_name = self.ui_inputs.get(
+            "productName", {}).get("value") or self.base_product_name_default
+        self.base_product_type = self.ui_inputs.get(
+            "productType", {}).get("value") or self.base_product_type_default
         self.vertical_sync = self.ui_inputs.get(
             "vSyncOn", {}).get("value") or self.vertical_sync_default
         self.driving_layer = self.ui_inputs.get(
@@ -509,12 +509,14 @@ class PublishableClip:
             or self.retimed_framerange_default
         )

-        # build subset name from layer name
-        if self.subset_name == "[ track name ]":
-            self.subset_name = self.track_name
+        # build product name from layer name
+        if self.base_product_name == "[ track name ]":
+            self.base_product_name = self.track_name

-        # create subset for publishing
-        self.subset = self.subset_family + self.subset_name.capitalize()
+        # create product for publishing
+        self.product_name = (
+            self.base_product_type + self.base_product_name.capitalize()
+        )

     def _replace_hash_to_expression(self, name, text):
         """ Replace hash with number in correct padding. """
@@ -608,14 +610,14 @@ class PublishableClip:
                 _hero_data = deepcopy(hero_data)
                 _hero_data.update({"heroTrack": False})
                 if _in <= self.clip_in and _out >= self.clip_out:
-                    data_subset = hero_data["subset"]
+                    data_product_name = hero_data["productName"]
                     # add track index in case duplicity of names in hero data
-                    if self.subset in data_subset:
-                        _hero_data["subset"] = self.subset + str(
+                    if self.product_name in data_product_name:
+                        _hero_data["productName"] = self.product_name + str(
                             self.track_index)
-                    # in case track name and subset name is the same then add
-                    if self.subset_name == self.track_name:
-                        _hero_data["subset"] = self.subset
+                    # in case track name and product name is the same then add
+                    if self.base_product_name == self.track_name:
+                        _hero_data["productName"] = self.product_name
                     # assign data to return hierarchy data to tag
                     tag_hierarchy_data = _hero_data
                     break
@@ -637,9 +639,9 @@ class PublishableClip:
             "hierarchy": hierarchy_filled,
             "parents": self.parents,
             "hierarchyData": hierarchy_formatting_data,
-            "subset": self.subset,
-            "family": self.subset_family,
-            "families": [self.family]
+            "productName": self.product_name,
+            "productType": self.base_product_type,
+            "families": [self.base_product_type, self.product_type]
         }

     def _convert_to_entity(self, type, template):
@@ -704,7 +706,7 @@ class ClipLoader(LoaderPlugin):
     _mapping = None
     _host_settings = None

-    def apply_settings(cls, project_settings, system_settings):
+    def apply_settings(cls, project_settings):

         plugin_type_settings = (
             project_settings
@@ -746,18 +748,16 @@ class ClipLoader(LoaderPlugin):
         Returns:
             str: colorspace name or None
         """
-        version = context['version']
-        version_data = version.get("data", {})
-        colorspace = version_data.get(
-            "colorspace", None
-        )
+        version_entity = context["version"]
+        version_attributes = version_entity["attrib"]
+        colorspace = version_attributes.get("colorSpace")

         if (
             not colorspace
             or colorspace == "Unknown"
         ):
             colorspace = context["representation"]["data"].get(
-                "colorspace", None)
+                "colorspace")

         return colorspace
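The colorspace lookup above now resolves in two steps: the `colorSpace` version attribute first, then the representation's own `colorspace` data as fallback. A distilled sketch over plain dicts, with invented entity contents:

```python
def resolve_colorspace(context):
    # Prefer the colorspace attribute stored on the version entity.
    colorspace = context["version"]["attrib"].get("colorSpace")
    if not colorspace or colorspace == "Unknown":
        # Fall back to colorspace data stored on the representation.
        colorspace = context["representation"]["data"].get("colorspace")
    return colorspace

context = {
    "version": {"attrib": {"colorSpace": "Unknown"}},
    "representation": {"data": {"colorspace": "ACES - ACEScg"}},
}
assert resolve_colorspace(context) == "ACES - ACEScg"
```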
@@ -34,4 +34,4 @@ def current_file():


 def work_root(session):
-    return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
+    return os.path.normpath(session["AYON_WORKDIR"]).replace("\\", "/")
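Only the session key changes here. Code that must run against both old and new launchers could fall back to the legacy variable; this shim is an illustration under that assumption, not something the commit adds:

```python
import os

def get_workdir_root():
    # Prefer the AYON variable, fall back to the legacy Avalon one.
    workdir = os.environ.get("AYON_WORKDIR") or os.environ.get("AVALON_WORKDIR")
    if not workdir:
        raise RuntimeError("No workdir environment variable is set")
    return os.path.normpath(workdir).replace("\\", "/")
```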
@@ -36,8 +36,8 @@ class FlamePrelaunch(PreLaunchHook):
         self.flame_pythonpath = _env["AYON_FLAME_PYTHONPATH"]

         """Hook entry method."""
-        project_doc = self.data["project_doc"]
-        project_name = project_doc["name"]
+        project_entity = self.data["project_entity"]
+        project_name = project_entity["name"]
         volume_name = _env.get("FLAME_WIRETAP_VOLUME")

         # get image io
@@ -63,20 +63,22 @@ class FlamePrelaunch(PreLaunchHook):
         hostname = socket.gethostname()  # not returning wiretap host name

         self.log.debug("Collected user \"{}\"".format(user_name))
-        self.log.info(pformat(project_doc))
-        _db_p_data = project_doc["data"]
-        width = _db_p_data["resolutionWidth"]
-        height = _db_p_data["resolutionHeight"]
-        fps = float(_db_p_data["fps"])
+        self.log.info(pformat(project_entity))
+        project_attribs = project_entity["attrib"]
+        width = project_attribs["resolutionWidth"]
+        height = project_attribs["resolutionHeight"]
+        fps = float(project_attribs["fps"])

         project_data = {
-            "Name": project_doc["name"],
-            "Nickname": _db_p_data["code"],
+            "Name": project_entity["name"],
+            "Nickname": project_entity["code"],
             "Description": "Created by OpenPype",
-            "SetupDir": project_doc["name"],
+            "SetupDir": project_entity["name"],
             "FrameWidth": int(width),
             "FrameHeight": int(height),
-            "AspectRatio": float((width / height) * _db_p_data["pixelAspect"]),
+            "AspectRatio": float(
+                (width / height) * project_attribs["pixelAspect"]
+            ),
             "FrameRate": self._get_flame_fps(fps)
         }
@@ -6,7 +6,7 @@ class CreateShotClip(opfapi.Creator):
     """Publishable clip"""

     label = "Create Publishable Clip"
-    family = "clip"
+    product_type = "clip"
     icon = "film"
     defaults = ["Main"]

@@ -32,7 +32,7 @@ class CreateShotClip(opfapi.Creator):

         # open widget for plugins inputs
         results_back = self.create_widget(
-            "Pype publish attributes creator",
+            "AYON publish attributes creator",
             "Define sequential rename and fill hierarchy data.",
             gui_inputs
         )
@@ -62,7 +62,7 @@ class CreateShotClip(opfapi.Creator):
             "log": self.log,
             "ui_inputs": results_back,
             "avalon": self.data,
-            "family": self.data["family"]
+            "product_type": self.data["productType"]
         }

         for i, segment in enumerate(sorted_selected_segments):
@@ -203,19 +203,19 @@ class CreateShotClip(opfapi.Creator):
             "target": "ui",
             "order": 3,
             "value": {
-                "subsetName": {
+                "productName": {
                     "value": ["[ track name ]", "main", "bg", "fg", "bg",
                               "animatic"],
                     "type": "QComboBox",
-                    "label": "Subset Name",
+                    "label": "Product Name",
                     "target": "ui",
-                    "toolTip": "choose subset name pattern, if [ track name ] is selected, name of track layer will be used",  # noqa
+                    "toolTip": "choose product name pattern, if [ track name ] is selected, name of track layer will be used",  # noqa
                     "order": 0},
-                "subsetFamily": {
+                "productType": {
                     "value": ["plate", "take"],
                     "type": "QComboBox",
-                    "label": "Subset Family",
-                    "target": "ui", "toolTip": "What use of this subset is for",  # noqa
+                    "label": "Product Type",
+                    "target": "ui", "toolTip": "What use of this product is for",  # noqa
                     "order": 1},
                 "reviewTrack": {
                     "value": ["< none >"] + gui_tracks,
@@ -229,7 +229,7 @@ class CreateShotClip(opfapi.Creator):
                 "type": "QCheckBox",
                 "label": "Include audio",
                 "target": "tag",
-                "toolTip": "Process subsets with corresponding audio",  # noqa
+                "toolTip": "Process products with corresponding audio",  # noqa
                 "order": 3},
             "sourceResolution": {
                 "value": False,
@@ -11,7 +11,7 @@ from ayon_core.lib.transcoding import (


 class LoadClip(opfapi.ClipLoader):
-    """Load a subset to timeline as clip
+    """Load a product to timeline as clip

     Place clip to timeline on its asset origin timings collected
     during conforming to project
@@ -31,14 +31,14 @@ class LoadClip(opfapi.ClipLoader):
     # settings
     reel_group_name = "OpenPype_Reels"
     reel_name = "Loaded"
-    clip_name_template = "{asset}_{subset}<_{output}>"
+    clip_name_template = "{folder[name]}_{product[name]}<_{output}>"

     """ Anatomy keys from version context data and dynamically added:
     - {layerName} - original layer name token
     - {layerUID} - original layer UID token
     - {originalBasename} - original clip name taken from file
     """
-    layer_rename_template = "{asset}_{subset}<_{output}>"
+    layer_rename_template = "{folder[name]}_{product[name]}<_{output}>"
     layer_rename_patterns = []

     def load(self, context, name, namespace, options):
@@ -48,9 +48,9 @@ class LoadClip(opfapi.ClipLoader):
         self.fpd = fproject.current_workspace.desktop

         # load clip to timeline and get main variables
-        version = context['version']
-        version_data = version.get("data", {})
-        version_name = version.get("name", None)
+        version_entity = context["version"]
+        version_attributes = version_entity["attrib"]
+        version_name = version_entity["version"]
         colorspace = self.get_colorspace(context)

         # in case output is not in context replace key to representation
@@ -70,7 +70,7 @@ class LoadClip(opfapi.ClipLoader):
         self.log.info("Loading with colorspace: `{}`".format(colorspace))

         # create workfile path
-        workfile_dir = os.environ["AVALON_WORKDIR"]
+        workfile_dir = os.environ["AYON_WORKDIR"]
         openclip_dir = os.path.join(
             workfile_dir, clip_name
         )
@@ -112,11 +112,10 @@ class LoadClip(opfapi.ClipLoader):
         ]

         # move all version data keys to tag data
-        data_imprint = {}
-        for key in add_keys:
-            data_imprint.update({
-                key: version_data.get(key, str(None))
-            })
+        data_imprint = {
+            key: version_attributes.get(key, str(None))
+            for key in add_keys
+        }

         # add variables related to version context
         data_imprint.update({
@@ -180,27 +179,27 @@ class LoadClip(opfapi.ClipLoader):
         # unwrapping segment from input clip
         pass

-    # def switch(self, container, representation):
-    #     self.update(container, representation)
+    # def switch(self, container, context):
+    #     self.update(container, context)

-    # def update(self, container, representation):
+    # def update(self, container, context):
     #     """ Updating previously loaded clips
     #     """

     #     # load clip to timeline and get main variables
+    #     repre_entity = context['representation']
     #     name = container['name']
     #     namespace = container['namespace']
     #     track_item = phiero.get_track_items(
     #         track_item_name=namespace)
     #     version = io.find_one({
     #         "type": "version",
-    #         "_id": representation["parent"]
+    #         "id": repre_entity["versionId"]
     #     })
     #     version_data = version.get("data", {})
     #     version_name = version.get("name", None)
-    #     colorspace = version_data.get("colorspace", None)
+    #     colorspace = version_data.get("colorSpace", None)
     #     object_name = "{}_{}".format(name, namespace)
-    #     file = get_representation_path(representation).replace("\\", "/")
+    #     file = get_representation_path(repre_entity).replace("\\", "/")
     #     clip = track_item.source()

     #     # reconnect media to new path
@@ -225,7 +224,7 @@ class LoadClip(opfapi.ClipLoader):

     #     # add variables related to version context
     #     data_imprint.update({
-    #         "representation": str(representation["_id"]),
+    #         "representation": repre_entity["id"],
     #         "version": version_name,
     #         "colorspace": colorspace,
     #         "objectName": object_name
@@ -10,7 +10,7 @@ from ayon_core.lib.transcoding import (
 )


 class LoadClipBatch(opfapi.ClipLoader):
-    """Load a subset to timeline as clip
+    """Load a product to timeline as clip

     Place clip to timeline on its asset origin timings collected
     during conforming to project
@@ -29,14 +29,14 @@ class LoadClipBatch(opfapi.ClipLoader):

     # settings
     reel_name = "OP_LoadedReel"
-    clip_name_template = "{batch}_{asset}_{subset}<_{output}>"
+    clip_name_template = "{batch}_{folder[name]}_{product[name]}<_{output}>"

     """ Anatomy keys from version context data and dynamically added:
     - {layerName} - original layer name token
     - {layerUID} - original layer UID token
     - {originalBasename} - original clip name taken from file
     """
-    layer_rename_template = "{asset}_{subset}<_{output}>"
+    layer_rename_template = "{folder[name]}_{product[name]}<_{output}>"
     layer_rename_patterns = []

     def load(self, context, name, namespace, options):
@@ -45,22 +45,13 @@ class LoadClipBatch(opfapi.ClipLoader):
         self.batch = options.get("batch") or flame.batch

         # load clip to timeline and get main variables
-        version = context['version']
-        version_data = version.get("data", {})
-        version_name = version.get("name", None)
+        version_entity = context["version"]
+        version_attributes = version_entity["attrib"]
+        version_name = version_entity["version"]
         colorspace = self.get_colorspace(context)

-        # TODO remove '{folder[name]}' and '{product[name]}' replacement
-        clip_name_template = (
-            self.clip_name_template
-            .replace("{folder[name]}", "{asset}")
-            .replace("{product[name]}", "{subset}")
-        )
-        layer_rename_template = (
-            self.layer_rename_template
-            .replace("{folder[name]}", "{asset}")
-            .replace("{product[name]}", "{subset}")
-        )
+        clip_name_template = self.clip_name_template
+        layer_rename_template = self.layer_rename_template
         # in case output is not in context replace key to representation
         if not context["representation"]["context"].get("output"):
             clip_name_template = clip_name_template.replace(
@@ -68,8 +59,22 @@ class LoadClipBatch(opfapi.ClipLoader):
             layer_rename_template = layer_rename_template.replace(
                 "output", "representation")

+        folder_entity = context["folder"]
+        product_entity = context["product"]
         formatting_data = deepcopy(context["representation"]["context"])
         formatting_data["batch"] = self.batch.name.get_value()
+        formatting_data.update({
+            "asset": folder_entity["name"],
+            "folder": {
+                "name": folder_entity["name"],
+            },
+            "subset": product_entity["name"],
+            "family": product_entity["productType"],
+            "product": {
+                "name": product_entity["name"],
+                "type": product_entity["productType"],
+            }
+        })

         clip_name = StringTemplate(clip_name_template).format(
             formatting_data)
@@ -80,7 +85,7 @@ class LoadClipBatch(opfapi.ClipLoader):
         self.log.info("Loading with colorspace: `{}`".format(colorspace))

         # create workfile path
-        workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
+        workfile_dir = options.get("workdir") or os.environ["AYON_WORKDIR"]
         openclip_dir = os.path.join(
             workfile_dir, clip_name
         )
@@ -124,7 +129,7 @@ class LoadClipBatch(opfapi.ClipLoader):

         # move all version data keys to tag data
         data_imprint = {
-            key: version_data.get(key, str(None))
+            key: version_attributes.get(key, str(None))
             for key in add_keys
         }
         # add variables related to version context
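The new clip name templates use nested tokens, which `ayon_core.lib.StringTemplate` resolves from nested dictionaries, and the `<...>` section stays optional. A small sketch with invented entity names:

```python
from ayon_core.lib import StringTemplate

clip_name_template = "{batch}_{folder[name]}_{product[name]}<_{output}>"

# Invented formatting data; the loader builds this from the load context.
formatting_data = {
    "batch": "batch01",
    "folder": {"name": "sh010"},
    "product": {"name": "plateMain"},
    "output": "exr",
}

clip_name = StringTemplate(clip_name_template).format(formatting_data)
# -> "batch01_sh010_plateMain_exr"; without "output" the "<_{output}>"
# part would be dropped instead of raising a KeyError.
```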
@@ -59,6 +59,6 @@ class CollectTestSelection(pyblish.api.ContextPlugin):

         opfapi.imprint(segment, {
             'asset': segment.name.get_value(),
-            'family': 'render',
-            'subset': 'subsetMain'
+            'productType': 'render',
+            'productName': 'productMain'
         })
@@ -3,6 +3,7 @@ from types import NoneType
 import pyblish
 import ayon_core.hosts.flame.api as opfapi
 from ayon_core.hosts.flame.otio import flame_export
+from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
 from ayon_core.pipeline.editorial import (
     is_overlapping_otio_ranges,
     get_media_range_with_retimes
@@ -47,7 +48,9 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
             if not marker_data:
                 continue

-            if marker_data.get("id") != "pyblish.avalon.instance":
+            if marker_data.get("id") not in {
+                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
+            }:
                 continue

             self.log.debug("__ segment.name: {}".format(
@@ -107,24 +110,25 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
             # add ocio_data to instance data
             inst_data.update(otio_data)

-            asset = marker_data["asset"]
-            subset = marker_data["subset"]
+            folder_path = marker_data["folderPath"]
+            folder_name = folder_path.rsplit("/")[-1]
+            product_name = marker_data["productName"]

-            # insert family into families
-            family = marker_data["family"]
+            # insert product type into families
+            product_type = marker_data["productType"]
             families = [str(f) for f in marker_data["families"]]
-            families.insert(0, str(family))
+            families.insert(0, str(product_type))

             # form label
-            label = asset
-            if asset != clip_name:
+            label = folder_name
+            if folder_name != clip_name:
                 label += " ({})".format(clip_name)
-            label += " {} [{}]".format(subset, ", ".join(families))
+            label += " {} [{}]".format(product_name, ", ".join(families))

             inst_data.update({
-                "name": "{}_{}".format(asset, subset),
+                "name": "{}_{}".format(folder_name, product_name),
                 "label": label,
-                "asset": asset,
+                "folderPath": folder_path,
                 "item": segment,
                 "families": families,
                 "publish": marker_data["publish"],
@@ -332,26 +336,28 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
         if not hierarchy_data:
             return

-        asset = data["asset"]
-        subset = "shotMain"
+        folder_path = data["folderPath"]
+        folder_name = folder_path.rsplit("/")[-1]
+        product_name = "shotMain"

-        # insert family into families
-        family = "shot"
+        # insert product type into families
+        product_type = "shot"

         # form label
-        label = asset
-        if asset != clip_name:
+        label = folder_name
+        if folder_name != clip_name:
             label += " ({}) ".format(clip_name)
-        label += " {}".format(subset)
-        label += " [{}]".format(family)
+        label += " {}".format(product_name)
+        label += " [{}]".format(product_type)

         data.update({
-            "name": "{}_{}".format(asset, subset),
+            "name": "{}_{}".format(folder_name, product_name),
             "label": label,
-            "subset": subset,
-            "asset": asset,
-            "family": family,
-            "families": []
+            "productName": product_name,
+            "folderPath": folder_path,
+            "productType": product_type,
+            "family": product_type,
+            "families": [product_type]
         })

         instance = context.create_instance(**data)
@@ -1,9 +1,8 @@
 import pyblish.api

-from ayon_core.client import get_asset_name_identifier
 import ayon_core.hosts.flame.api as opfapi
 from ayon_core.hosts.flame.otio import flame_export
-from ayon_core.pipeline.create import get_subset_name
+from ayon_core.pipeline.create import get_product_name


 class CollecTimelineOTIO(pyblish.api.ContextPlugin):
@@ -14,38 +13,41 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):

     def process(self, context):
         # plugin defined
-        family = "workfile"
+        product_type = "workfile"
         variant = "otioTimeline"

         # main
-        asset_doc = context.data["assetEntity"]
-        task_name = context.data["task"]
+        folder_entity = context.data["folderEntity"]
         project = opfapi.get_current_project()
         sequence = opfapi.get_current_sequence(opfapi.CTX.selection)

-        # create subset name
-        subset_name = get_subset_name(
-            family,
-            variant,
-            task_name,
-            asset_doc,
+        # create product name
+        task_entity = context.data["taskEntity"]
+        task_name = task_type = None
+        if task_entity:
+            task_name = task_entity["name"]
+            task_type = task_entity["taskType"]
+        product_name = get_product_name(
+            context.data["projectName"],
+            task_name,
+            task_type,
+            context.data["hostName"],
+            product_type,
+            variant,
+            project_settings=context.data["project_settings"]
         )

-        asset_name = get_asset_name_identifier(asset_doc)
-
         # adding otio timeline to context
         with opfapi.maintained_segment_selection(sequence) as selected_seg:
             otio_timeline = flame_export.create_otio_timeline(sequence)

             instance_data = {
-                "name": subset_name,
-                "asset": asset_name,
-                "subset": subset_name,
-                "family": "workfile",
-                "families": []
+                "name": product_name,
+                "folderPath": folder_entity["path"],
+                "productName": product_name,
+                "productType": product_type,
+                "family": product_type,
+                "families": [product_type]
             }

             # create instance with workfile
@@ -55,7 +55,7 @@ class ExtractProductResources(publish.Extractor):

         # flame objects
         segment = instance.data["item"]
-        asset_name = instance.data["asset"]
+        folder_path = instance.data["folderPath"]
         segment_name = segment.name.get_value()
         clip_path = instance.data["path"]
         sequence_clip = instance.context.data["flameSequence"]
@@ -249,7 +249,7 @@ class ExtractProductResources(publish.Extractor):
         out_mark = in_mark + source_duration_handles
         exporting_clip = self.import_clip(clip_path)
         exporting_clip.name.set_value("{}_{}".format(
-            asset_name, segment_name))
+            folder_path, segment_name))

         # add xml tags modifications
         modify_xml_data.update({
@@ -44,8 +44,8 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
         ))

         # load plate to batch group
-        self.log.info("Loading subset `{}` into batch `{}`".format(
-            instance.data["subset"], bgroup.name.get_value()
+        self.log.info("Loading product `{}` into batch `{}`".format(
+            instance.data["productName"], bgroup.name.get_value()
         ))
         self._load_clip_to_context(instance, bgroup)

@@ -168,10 +168,10 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
         handle_start = instance.data["handleStart"]
         handle_end = instance.data["handleEnd"]
         frame_duration = (frame_end - frame_start) + 1
-        asset_name = instance.data["asset"]
+        folder_path = instance.data["folderPath"]

         task_name = task_data["name"]
-        batchgroup_name = "{}_{}".format(asset_name, task_name)
+        batchgroup_name = "{}_{}".format(folder_path, task_name)

         batch_data = {
             "shematic_reels": [
@@ -219,7 +219,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):

         # update task data in anatomy data
         project_task_types = anatomy_obj["tasks"]
-        task_code = project_task_types.get(task_type, {}).get("short_name")
+        task_code = project_task_types.get(task_type, {}).get("shortName")
         anatomy_data.update({
             "task": {
                 "name": task_name,
@@ -247,7 +247,7 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
         os.makedirs(render_dir_path, mode=0o777)

         # TODO: add most of these to `imageio/flame/batch/write_node`
-        name = "{project[code]}_{asset}_{task[name]}".format(
+        name = "{project[code]}_{folder[name]}_{task[name]}".format(
             **anatomy_data
         )

@@ -321,16 +321,17 @@ class IntegrateBatchGroup(pyblish.api.InstancePlugin):
         ))

     def _get_shot_task_dir_path(self, instance, task_data):
-        project_doc = instance.data["projectEntity"]
-        asset_entity = instance.data["assetEntity"]
+        project_entity = instance.data["projectEntity"]
+        folder_entity = instance.data["folderEntity"]
+        task_entity = instance.data["taskEntity"]
         anatomy = instance.context.data["anatomy"]
         project_settings = instance.context.data["project_settings"]

         return get_workdir(
-            project_doc,
-            asset_entity,
-            task_data["name"],
+            project_entity,
+            folder_entity,
+            task_entity,
             "flame",
-            anatomy,
+            anatomy=anatomy,
             project_settings=project_settings
         )
@@ -22,7 +22,7 @@ def get_fusion_version(app_name):
     The function is triggered by the prelaunch hooks to get the fusion version.

     `app_name` is obtained by prelaunch hooks from the
-    `launch_context.env.get("AVALON_APP_NAME")`.
+    `launch_context.env.get("AYON_APP_NAME")`.

     To get a correct Fusion version, a version number should be present
     in the `applications/fusion/variants` key
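A hedged sketch of how a version might be pulled out of such an app name; the parsing helper and the example variant value are assumptions for illustration, not the hook's actual code:

```python
import re

def parse_fusion_version(app_name):
    # Assumed variant shape like "fusion/18-5"; grab the first integer.
    match = re.search(r"(\d+)", app_name or "")
    return int(match.group(1)) if match else None

assert parse_fusion_version("fusion/18-5") == 18
assert parse_fusion_version("fusion") is None
```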
Some files were not shown because too many files have changed in this diff.