modified integration plugins to use AYON entity types and functions

This commit is contained in:
Jakub Trllo 2024-03-08 15:54:45 +01:00
parent 0a2b676713
commit 552c9c2147
4 changed files with 362 additions and 316 deletions

View file

@ -2,27 +2,24 @@ import os
import logging
import sys
import copy
import datetime
import clique
import six
import pyblish.api
from ayon_core.client.operations import (
OperationsSession,
new_subset_document,
new_version_doc,
new_representation_doc,
prepare_subset_update_data,
prepare_version_update_data,
prepare_representation_update_data,
)
from ayon_core.client import (
get_representations,
get_subset_by_name,
from ayon_api import (
get_attributes_for_type,
get_product_by_name,
get_version_by_name,
get_representations,
)
from ayon_api.operations import (
OperationsSession,
new_product_entity,
new_version_entity,
new_representation_entity,
)
from ayon_api.utils import create_entity_id
from ayon_core.lib import source_hash
from ayon_core.lib.file_transaction import (
FileTransaction,
@ -36,6 +33,36 @@ from ayon_core.pipeline.publish import (
log = logging.getLogger(__name__)
def prepare_changes(old_entity, new_entity):
    """Prepare changes for entity update.

    Compares top-level keys and the nested "attrib" dictionary and
    collects only values that differ in 'new_entity'.

    Args:
        old_entity (dict[str, Any]): Existing entity data.
        new_entity (dict[str, Any]): New entity data.

    Returns:
        dict[str, Any]: Keys (and "attrib" sub-keys) whose values changed.

    """

    changes = {}
    for key, value in new_entity.items():
        # Attributes are compared per-key below
        if key == "attrib":
            continue
        # NOTE: iterating 'new_entity' items directly makes the previous
        #   'key in new_entity' check redundant
        if value != old_entity.get(key):
            changes[key] = value

    attrib_changes = {}
    # Use '.get' so an old entity without "attrib" filled does not raise
    old_attrib = old_entity.get("attrib", {})
    for key, value in new_entity.get("attrib", {}).items():
        if value != old_attrib.get(key):
            attrib_changes[key] = value
    if attrib_changes:
        changes["attrib"] = attrib_changes
    return changes
def get_instance_families(instance):
"""Get all families of the instance"""
# todo: move this to lib?
@ -164,7 +191,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
]
def process(self, instance):
# Instance should be integrated on a farm
if instance.data.get("farm"):
self.log.debug(
@ -256,23 +282,23 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_name = self.get_template_name(instance)
op_session = OperationsSession()
subset = self.prepare_subset(
product_entity = self.prepare_product(
instance, op_session, project_name
)
version = self.prepare_version(
instance, op_session, subset, project_name
version_entity = self.prepare_version(
instance, op_session, product_entity, project_name
)
instance.data["versionEntity"] = version
instance.data["versionEntity"] = version_entity
anatomy = instance.context.data["anatomy"]
# Get existing representations (if any)
existing_repres_by_name = {
repre_doc["name"].lower(): repre_doc
for repre_doc in get_representations(
repre_entity["name"].lower(): repre_entity
for repre_entity in get_representations(
project_name,
version_ids=[version["_id"]],
fields=["_id", "name"]
version_ids=[version_entity["id"]],
fields={"id", "name"}
)
}
@ -284,7 +310,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
repre,
template_name,
existing_repres_by_name,
version,
version_entity,
instance_stagingdir,
instance)
@ -312,7 +338,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
resource_destinations.add(os.path.abspath(dst))
# Bulk write to the database
# We write the subset and version to the database before the File
# We write the product and version to the database before the File
# Transaction to reduce the chances of another publish trying to
# publish to the same version number since that chance can greatly
# increase if the file transaction takes a long time.
@ -320,7 +346,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.info((
"Product '{}' version {} written to database.."
).format(subset["name"], version["name"]))
).format(product_entity["name"], version_entity["version"]))
# Process all file transfers of all integrations now
self.log.debug("Integrating source files to destination ...")
@ -331,58 +357,46 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"Transferred files: {}".format(file_transactions.transferred))
self.log.debug("Retrieving Representation Site Sync information ...")
# Get the accessible sites for Site Sync
addons_manager = instance.context.data["ayonAddonsManager"]
sync_server_addon = addons_manager.get("sync_server")
if sync_server_addon is None:
sites = [{
"name": "studio",
"created_dt": datetime.datetime.now()
}]
else:
sites = sync_server_addon.compute_resource_sync_sites(
project_name=instance.data["projectEntity"]["name"]
)
self.log.debug("Sync Server Sites: {}".format(sites))
# Compute the resource file infos once (files belonging to the
# version instance instead of an individual representation) so
# we can re-use those file infos per representation
resource_file_infos = self.get_files_info(resource_destinations,
sites=sites,
anatomy=anatomy)
resource_file_infos = self.get_files_info(
resource_destinations, anatomy
)
# Finalize the representations now the published files are integrated
# Get 'files' info for representations and its attached resources
new_repre_names_low = set()
for prepared in prepared_representations:
repre_doc = prepared["representation"]
repre_update_data = prepared["repre_doc_update_data"]
repre_entity = prepared["representation"]
repre_update_data = prepared["repre_update_data"]
transfers = prepared["transfers"]
destinations = [dst for src, dst in transfers]
repre_doc["files"] = self.get_files_info(
destinations, sites=sites, anatomy=anatomy
repre_files = self.get_files_info(
destinations, anatomy
)
# Add the version resource file infos to each representation
repre_doc["files"] += resource_file_infos
repre_files += resource_file_infos
repre_entity["files"] = repre_files
# Set up representation for writing to the database. Since
# we *might* be overwriting an existing entry if the version
# already existed we'll use ReplaceOnce with `upsert=True`
if repre_update_data is None:
op_session.create_entity(
project_name, repre_doc["type"], repre_doc
project_name, "representation", repre_entity
)
else:
# Add files to update data
repre_update_data["files"] = repre_files
op_session.update_entity(
project_name,
repre_doc["type"],
repre_doc["_id"],
"representation",
repre_entity["id"],
repre_update_data
)
new_repre_names_low.add(repre_doc["name"].lower())
new_repre_names_low.add(repre_entity["name"].lower())
# Delete any existing representations that didn't get any new data
# if the instance is not set to append mode
@ -392,7 +406,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# We add the exact representation name because `name` is
# lowercase for name matching only and not in the database
op_session.delete_entity(
project_name, "representation", existing_repres["_id"]
project_name, "representation", existing_repres["id"]
)
self.log.debug("{}".format(op_session.to_data()))
@ -401,7 +415,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Backwards compatibility used in hero integration.
# todo: can we avoid the need to store this?
instance.data["published_representations"] = {
p["representation"]["_id"]: p for p in prepared_representations
p["representation"]["id"]: p
for p in prepared_representations
}
self.log.info(
@ -412,108 +427,131 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
)
)
def prepare_subset(self, instance, op_session, project_name):
def prepare_product(self, instance, op_session, project_name):
folder_entity = instance.data["folderEntity"]
product_name = instance.data["productName"]
product_type = instance.data["productType"]
self.log.debug("Product: {}".format(product_name))
# Get existing subset if it exists
existing_subset_doc = get_subset_by_name(
# Get existing product if it exists
existing_product_entity = get_product_by_name(
project_name, product_name, folder_entity["id"]
)
# Define subset data
# Define product data
data = {
"families": get_instance_families(instance)
}
attribibutes = {}
subset_group = instance.data.get("productGroup")
if subset_group:
data["productGroup"] = subset_group
elif existing_subset_doc:
# Preserve previous subset group if new version does not set it
if "productGroup" in existing_subset_doc.get("data", {}):
subset_group = existing_subset_doc["data"]["productGroup"]
data["productGroup"] = subset_group
product_group = instance.data.get("productGroup")
if product_group:
attribibutes["productGroup"] = product_group
elif existing_product_entity:
# Preserve previous product group if new version does not set it
product_group = existing_product_entity.get("attrib", {}).get(
"productGroup"
)
if product_group is not None:
attribibutes["productGroup"] = product_group
subset_id = None
if existing_subset_doc:
subset_id = existing_subset_doc["_id"]
subset_doc = new_subset_document(
product_name, product_type, folder_entity["id"], data, subset_id
product_id = None
if existing_product_entity:
product_id = existing_product_entity["id"]
product_entity = new_product_entity(
product_name,
product_type,
folder_entity["id"],
data=data,
attribs=attribibutes,
entity_id=product_id
)
if existing_subset_doc is None:
# Create a new subset
if existing_product_entity is None:
# Create a new product
self.log.info(
"Product '%s' not found, creating ..." % product_name
)
op_session.create_entity(
project_name, subset_doc["type"], subset_doc
project_name, "product", product_entity
)
else:
# Update existing subset data with new data and set in database.
# We also change the found subset in-place so we don't need to
# re-query the subset afterwards
subset_doc["data"].update(data)
update_data = prepare_subset_update_data(
existing_subset_doc, subset_doc
# Update existing product data with new data and set in database.
# We also change the found product in-place so we don't need to
# re-query the product afterwards
update_data = prepare_changes(
existing_product_entity, product_entity
)
op_session.update_entity(
project_name,
subset_doc["type"],
subset_doc["_id"],
"product",
product_entity["id"],
update_data
)
self.log.debug("Prepared product: {}".format(product_name))
return subset_doc
return product_entity
def prepare_version(self, instance, op_session, subset_doc, project_name):
def prepare_version(
self, instance, op_session, product_entity, project_name
):
version_number = instance.data["version"]
task_id = None
task_entity = instance.data.get("taskEntity")
if task_entity:
task_id = task_entity["id"]
existing_version = get_version_by_name(
project_name,
version_number,
subset_doc["_id"],
fields=["_id"]
product_entity["id"],
fields={"id"}
)
version_id = None
if existing_version:
version_id = existing_version["_id"]
version_id = existing_version["id"]
version_data = self.create_version_data(instance)
version_doc = new_version_doc(
all_version_data = self.create_version_data(instance)
version_data = {}
version_attributes = {}
attr_defs = self._get_attributes_for_type(instance.context, "version")
for key, value in all_version_data.items():
if key in attr_defs:
version_attributes[key] = value
else:
version_data[key] = value
version_entity = new_version_entity(
version_number,
subset_doc["_id"],
version_data,
version_id
product_entity["id"],
task_id=task_id,
data=version_data,
attribs=version_attributes,
entity_id=version_id,
)
if existing_version:
self.log.debug("Updating existing version ...")
update_data = prepare_version_update_data(
existing_version, version_doc
)
update_data = prepare_changes(existing_version, version_entity)
op_session.update_entity(
project_name,
version_doc["type"],
version_doc["_id"],
"version",
version_entity["id"],
update_data
)
else:
self.log.debug("Creating new version ...")
op_session.create_entity(
project_name, version_doc["type"], version_doc
project_name, "version", version_entity
)
self.log.debug(
"Prepared version: v{0:03d}".format(version_doc["name"])
"Prepared version: v{0:03d}".format(version_entity["version"])
)
return version_doc
return version_entity
def _validate_repre_files(self, files, is_sequence_representation):
"""Validate representation files before transfer preparation.
@ -552,13 +590,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
", ".join([str(rem) for rem in remainders])
))
def prepare_representation(self, repre,
template_name,
existing_repres_by_name,
version,
instance_stagingdir,
instance):
def prepare_representation(
self,
repre,
template_name,
existing_repres_by_name,
version_entity,
instance_stagingdir,
instance
):
# pre-flight validations
if repre["ext"].startswith("."):
raise KnownPublishError((
@ -581,7 +621,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_data["ext"] = repre["ext"]
# allow overwriting existing version
template_data["version"] = version["name"]
template_data["version"] = version_entity["version"]
# add template data for colorspaceData
if repre.get("colorspaceData"):
@ -823,7 +863,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
existing = existing_repres_by_name.get(repre["name"].lower())
repre_id = None
if existing:
repre_id = existing["_id"]
repre_id = existing["id"]
# Store first transferred destination as published path data
# - used primarily for reviews that are integrated to custom modules
@ -835,25 +875,37 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# todo: `repre` is not the actual `representation` entity
# we should simplify/clarify difference between data above
# and the actual representation entity for the database
data = repre.get("data", {})
data.update({"path": published_path, "template": template})
attr_defs = self._get_attributes_for_type(
instance.context, "representation"
)
attributes = {"path": published_path, "template": template}
data = {"context": repre_context}
for key, value in repre.get("data", {}).items():
if key in attr_defs:
attributes[key] = value
else:
data[key] = value
# add colorspace data if any exists on representation
if repre.get("colorspaceData"):
data["colorspaceData"] = repre["colorspaceData"]
repre_doc = new_representation_doc(
repre["name"], version["_id"], repre_context, data, repre_id
repre_doc = new_representation_entity(
repre["name"],
version_entity["id"],
# files are filled afterwards
[],
data=data,
attribs=attributes,
entity_id=repre_id
)
update_data = None
if repre_id is not None:
update_data = prepare_representation_update_data(
existing, repre_doc
)
update_data = prepare_changes(existing, repre_doc)
return {
"representation": repre_doc,
"repre_doc_update_data": update_data,
"repre_update_data": update_data,
"anatomy_data": template_data,
"transfers": transfers,
# todo: avoid the need for 'published_files' used by Integrate Hero
@ -950,13 +1002,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
'{root}/MyProject1/Assets...'
Args:
anatomy: anatomy part from instance
path: path (absolute)
Returns:
path: modified path if possible, or unmodified path
+ warning logged
"""
anatomy (Anatomy): Project anatomy.
path (str): Absolute path.
Returns:
str: Path where root path is replaced by formatting string.
"""
success, rootless_path = anatomy.find_root_template_from_path(path)
if success:
path = rootless_path
@ -967,43 +1019,41 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
).format(path))
return path
def get_files_info(self, destinations, sites, anatomy):
def get_files_info(self, filepaths, anatomy):
"""Prepare 'files' info portion for representations.
Arguments:
destinations (list): List of transferred file destinations
sites (list): array of published locations
anatomy: anatomy part from instance
Returns:
output_resources: array of dictionaries to be added to 'files' key
in representation
"""
filepaths (Iterable[str]): List of transferred file paths.
anatomy (Anatomy): Project anatomy.
Returns:
list[dict[str, Any]]: Representation 'files' information.
"""
file_infos = []
for file_path in destinations:
file_info = self.prepare_file_info(file_path, anatomy, sites=sites)
for filepath in filepaths:
file_info = self.prepare_file_info(filepath, anatomy)
file_infos.append(file_info)
return file_infos
def prepare_file_info(self, path, anatomy, sites):
def prepare_file_info(self, path, anatomy):
""" Prepare information for one file (asset or resource)
Arguments:
path: destination url of published file
anatomy: anatomy part from instance
sites: array of published locations,
[ {'name':'studio', 'created_dt':date} by default
keys expected ['studio', 'site1', 'gdrive1']
path (str): Destination url of published file.
anatomy (Anatomy): Project anatomy part from instance.
Returns:
dict: file info dictionary
"""
dict[str, Any]: Representation file info dictionary.
"""
return {
"id": create_entity_id(),
"name": os.path.basename(path),
"path": self.get_rootless_path(anatomy, path),
"size": os.path.getsize(path),
"hash": source_hash(path),
"sites": sites
"hash_type": "op3",
}
def _validate_path_in_project_roots(self, anatomy, file_path):
@ -1012,10 +1062,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
Used to check that published path belongs to project, eg. we are not
trying to publish to local only folder.
Args:
anatomy (Anatomy)
file_path (str)
Raises
(KnownPublishError)
anatomy (Anatomy): Project anatomy.
file_path (str): Filepath.
Raises:
KnownPublishError: When failed to find root for the path.
"""
path = self.get_rootless_path(anatomy, file_path)
if not path:
@ -1023,3 +1074,21 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"Destination path '{}' ".format(file_path) +
"must be in project dir"
))
def _get_attributes_for_type(self, context, entity_type):
return self._get_attributes_by_type(context)[entity_type]
def _get_attributes_by_type(self, context):
attributes = context.data.get("ayonAttributes")
if attributes is None:
attributes = {}
for key in (
"project",
"folder",
"product",
"version",
"representation",
):
attributes[key] = get_attributes_for_type(key)
context.data["ayonAttributes"] = attributes
return attributes

View file

@ -1,30 +1,53 @@
import os
import copy
import clique
import errno
import shutil
import clique
import pyblish.api
from ayon_core.client import (
get_version_by_id,
get_hero_version_by_subset_id,
get_archived_representations,
get_representations,
)
from ayon_core.client.operations import (
import ayon_api
from ayon_api.operations import (
OperationsSession,
new_version_entity,
)
from ayon_core.client.operations import (
new_hero_version_doc,
prepare_hero_version_update_data,
prepare_representation_update_data,
)
from ayon_core.lib import create_hard_link
from ayon_core.pipeline import (
schema
)
from ayon_core.pipeline.publish import get_publish_template_name
def prepare_changes(old_entity, new_entity):
    """Prepare changes for entity update.

    Args:
        old_entity: Existing entity.
        new_entity: New entity.

    Returns:
        dict[str, Any]: Changes that have new entity.

    """

    # Top-level values that differ ("attrib" is handled per-key below)
    changes = {
        key: value
        for key, value in new_entity.items()
        if key != "attrib" and value != old_entity.get(key)
    }

    attrib_changes = {
        key: value
        for key, value in new_entity.get("attrib", {}).items()
        if value != old_entity["attrib"].get(key)
    }
    if attrib_changes:
        changes["attrib"] = attrib_changes
    return changes
class IntegrateHeroVersion(pyblish.api.InstancePlugin):
label = "Integrate Hero Version"
# Must happen after IntegrateNew
@ -150,7 +173,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
))
return
if src_version_entity["name"] == 0:
if src_version_entity["version"] == 0:
self.log.debug(
"Version 0 cannot have hero version. Skipping."
)
@ -200,39 +223,45 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
old_version, old_repres = self.current_hero_ents(
project_name, src_version_entity
)
old_repres_by_name = {
repre["name"].lower(): repre for repre in old_repres
}
inactive_old_repres_by_name = {}
old_repres_by_name = {}
for repre in old_repres:
low_name = repre["name"].lower()
if repre["active"]:
old_repres_by_name[low_name] = repre
else:
inactive_old_repres_by_name[low_name] = repre
op_session = OperationsSession()
entity_id = None
if old_version:
entity_id = old_version["_id"]
entity_id = old_version["id"]
new_hero_version = new_hero_version_doc(
src_version_entity["parent"],
copy.deepcopy(src_version_entity["data"]),
src_version_entity["name"],
entity_id=entity_id
new_hero_version = new_version_entity(
src_version_entity["version"],
src_version_entity["productId"],
task_id=src_version_entity["taskId"],
data=copy.deepcopy(src_version_entity["data"]),
attribs=copy.deepcopy(src_version_entity["attrib"]),
entity_id=entity_id,
)
if old_version:
self.log.debug("Replacing old hero version.")
update_data = prepare_hero_version_update_data(
update_data = prepare_changes(
old_version, new_hero_version
)
op_session.update_entity(
project_name,
new_hero_version["type"],
old_version["_id"],
"version",
old_version["id"],
update_data
)
else:
self.log.debug("Creating first hero version.")
op_session.create_entity(
project_name, new_hero_version["type"], new_hero_version
project_name, "version", new_hero_version
)
# Separate old representations into `to replace` and `to delete`
@ -249,16 +278,6 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
if old_repres_by_name:
old_repres_to_delete = old_repres_by_name
archived_repres = list(get_archived_representations(
project_name,
# Check what is type of archived representation
version_ids=[new_hero_version["_id"]]
))
archived_repres_by_name = {}
for repre in archived_repres:
repre_name_low = repre["name"].lower()
archived_repres_by_name[repre_name_low] = repre
backup_hero_publish_dir = None
if os.path.exists(hero_publish_dir):
backup_hero_publish_dir = hero_publish_dir + ".BACKUP"
@ -322,7 +341,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
# Get filled path to repre context
template_filled = path_template_obj.format_strict(anatomy_data)
repre_data = {
repre_attributes = {
"path": str(template_filled),
"template": hero_template
}
@ -333,11 +352,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
repre_context[key] = value
# Prepare new repre
repre = copy.deepcopy(repre_info["representation"])
repre["parent"] = new_hero_version["_id"]
repre["context"] = repre_context
repre["data"] = repre_data
repre.pop("_id", None)
repre_entity = copy.deepcopy(repre_info["representation"])
repre_entity.pop("id", None)
repre_entity["versionId"] = new_hero_version["id"]
repre_entity["context"] = repre_context
repre_entity["attrib"] = repre_attributes
# Prepare paths of source and destination files
if len(published_files) == 1:
@ -378,82 +397,48 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
)
# replace original file name with hero name in repre doc
for index in range(len(repre.get("files"))):
file = repre.get("files")[index]
file_name = os.path.basename(file.get('path'))
for src_file, dst_file in src_to_dst_file_paths:
src_file_name = os.path.basename(src_file)
if src_file_name == file_name:
repre["files"][index]["path"] = self._update_path(
anatomy, repre["files"][index]["path"],
src_file, dst_file)
dst_paths = []
for _, dst_path in src_to_dst_file_paths:
dst_paths.append(dst_path)
repre_files = self.get_files_info(dst_paths, anatomy)
repre_entity["files"] = repre_files
repre["files"][index]["hash"] = self._update_hash(
repre["files"][index]["hash"],
src_file_name, dst_file
)
schema.validate(repre)
repre_name_low = repre["name"].lower()
repre_name_low = repre_entity["name"].lower()
# Replace current representation
if repre_name_low in old_repres_to_replace:
old_repre = old_repres_to_replace.pop(repre_name_low)
repre["_id"] = old_repre["_id"]
update_data = prepare_representation_update_data(
old_repre, repre)
# Keep previously synchronized sites up-to-date
# by comparing old and new sites and adding old sites
# if missing in new ones
# Prepare all sites from all files in old representation
old_site_names = set()
for file_info in old_repre.get("files", []):
old_site_names |= {
site["name"]
for site in file_info["sites"]
}
for file_info in update_data.get("files", []):
file_info.setdefault("sites", [])
file_info_site_names = {
site["name"]
for site in file_info["sites"]
}
for site_name in old_site_names:
if site_name not in file_info_site_names:
file_info["sites"].append({
"name": site_name
})
repre_entity["id"] = old_repre["id"]
update_data = prepare_changes(old_repre, repre_entity)
op_session.update_entity(
project_name,
old_repre["type"],
old_repre["_id"],
"representation",
old_repre["id"],
update_data
)
# Unarchive representation
elif repre_name_low in archived_repres_by_name:
archived_repre = archived_repres_by_name.pop(
elif repre_name_low in inactive_old_repres_by_name:
inactive_repre = inactive_old_repres_by_name.pop(
repre_name_low
)
repre["_id"] = archived_repre["old_id"]
update_data = prepare_representation_update_data(
archived_repre, repre)
repre_entity["id"] = inactive_repre["id"]
update_data = prepare_changes(inactive_repre, repre_entity)
op_session.update_entity(
project_name,
old_repre["type"],
archived_repre["_id"],
"representation",
inactive_repre["id"],
update_data
)
# Create representation
else:
repre.pop("_id", None)
op_session.create_entity(project_name, "representation",
repre)
op_session.create_entity(
project_name,
"representation",
repre_entity
)
self.path_checks = []
@ -467,27 +452,13 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
self.copy_file(src_path, dst_path)
# Archive not replaced old representations
for repre_name_low, repre in old_repres_to_delete.items():
# Replace archived representation (This is backup)
# - should not happen to have both repre and archived repre
if repre_name_low in archived_repres_by_name:
archived_repre = archived_repres_by_name.pop(
repre_name_low
)
changes = {"old_id": repre["_id"],
"_id": archived_repre["_id"],
"type": archived_repre["type"]}
op_session.update_entity(project_name,
archived_repre["type"],
archived_repre["_id"],
changes)
else:
repre["old_id"] = repre.pop("_id")
repre["type"] = "archived_representation"
op_session.create_entity(project_name,
"archived_representation",
repre)
for repre in old_repres_to_delete.values():
op_session.update_entity(
project_name,
"representation",
repre["id"],
{"active": False}
)
op_session.commit()
@ -519,13 +490,42 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
instance.data["productName"]
))
def get_all_files_from_path(self, path):
    """Collect all filepaths found recursively under 'path'."""
    return [
        os.path.join(dir_path, file_name)
        for dir_path, _dir_names, file_names in os.walk(path)
        for file_name in file_names
    ]
def get_files_info(self, filepaths, anatomy):
    """Prepare 'files' info portion for representations.

    Arguments:
        filepaths (Iterable[str]): List of transferred file paths.
        anatomy (Anatomy): Project anatomy.

    Returns:
        list[dict[str, Any]]: Representation 'files' information.

    """

    return [
        self.prepare_file_info(filepath, anatomy)
        for filepath in filepaths
    ]
def prepare_file_info(self, path, anatomy):
    """ Prepare information for one file (asset or resource)

    Arguments:
        path (str): Destination url of published file.
        anatomy (Anatomy): Project anatomy part from instance.

    Returns:
        dict[str, Any]: Representation file info dictionary.
    """

    # NOTE(review): 'create_entity_id' and 'source_hash' are not visibly
    #   imported in this file's import block — confirm they are imported
    #   (e.g. 'from ayon_api.utils import create_entity_id' and
    #   'from ayon_core.lib import source_hash'), otherwise this raises
    #   NameError at runtime.
    return {
        "id": create_entity_id(),
        "name": os.path.basename(path),
        # Path with the project root replaced by a root template token
        "path": self.get_rootless_path(anatomy, path),
        "size": os.path.getsize(path),
        "hash": source_hash(path),
        # NOTE(review): "op3" presumably marks the OpenPype v3 style
        #   source hash — confirm against server expectations
        "hash_type": "op3",
    }
def get_publish_dir(self, instance, template_key):
anatomy = instance.context.data["anatomy"]
@ -617,48 +617,25 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
def version_from_representations(self, project_name, repres):
for repre in repres:
version = get_version_by_id(project_name, repre["parent"])
version = ayon_api.get_version_by_id(
project_name, repre["versionId"]
)
if version:
return version
def current_hero_ents(self, project_name, version):
hero_version = get_hero_version_by_subset_id(
project_name, version["parent"]
hero_version = ayon_api.get_hero_version_by_product_id(
project_name, version["productId"]
)
if not hero_version:
return (None, [])
hero_repres = list(get_representations(
project_name, version_ids=[hero_version["_id"]]
hero_repres = list(ayon_api.get_representations(
project_name, version_ids={hero_version["id"]}
))
return (hero_version, hero_repres)
def _update_path(self, anatomy, path, src_file, dst_file):
"""
Replaces source path with new hero path
'path' contains original path with version, must be replaced with
'hero' path (with 'hero' label and without version)
Args:
anatomy (Anatomy) - to get rootless style of path
path (string) - path from DB
src_file (string) - original file path
dst_file (string) - hero file path
"""
_, rootless = anatomy.find_root_template_from_path(dst_file)
_, rtls_src = anatomy.find_root_template_from_path(src_file)
return path.replace(rtls_src, rootless)
def _update_hash(self, hash, src_file_name, dst_file):
"""
Updates hash value with proper hero name
"""
src_file_name = self._get_name_without_ext(src_file_name)
hero_file_name = self._get_name_without_ext(dst_file)
return hash.replace(src_file_name, hero_file_name)
def _get_name_without_ext(self, value):
file_name = os.path.basename(value)
file_name, _ = os.path.splitext(file_name)

View file

@ -27,8 +27,7 @@ import collections
import pyblish.api
import ayon_api
from ayon_core.client.operations import OperationsSession
from ayon_api.operations import OperationsSession
InstanceFilterResult = collections.namedtuple(
"InstanceFilterResult",
@ -162,8 +161,6 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
version_entities_by_id,
project_name
):
from ayon_core.client.operations import create_thumbnail
# Make sure each entity id has defined only one thumbnail id
thumbnail_info_by_entity_id = {}
for instance_item in filtered_instance_items:
@ -176,7 +173,9 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
).format(instance_label))
continue
thumbnail_id = create_thumbnail(project_name, thumbnail_path)
thumbnail_id = ayon_api.create_thumbnail(
project_name, thumbnail_path
)
# Set thumbnail id for version
thumbnail_info_by_entity_id[version_id] = {
@ -194,7 +193,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
folder_path = instance.data["folderPath"]
thumbnail_info_by_entity_id[folder_id] = {
"thumbnail_id": thumbnail_id,
"entity_type": "asset",
"entity_type": "folder",
}
self.log.debug("Setting thumbnail for folder \"{}\" <{}>".format(
folder_path, version_id
@ -207,7 +206,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):
project_name,
thumbnail_info["entity_type"],
entity_id,
{"data.thumbnail_id": thumbnail_id}
{"thumbnailId": thumbnail_id}
)
op_session.commit()

View file

@ -1,7 +1,6 @@
import pyblish.api
import ayon_api
from ayon_core.client.operations import OperationsSession
from ayon_api.operations import OperationsSession
class IntegrateVersionAttributes(pyblish.api.ContextPlugin):
@ -33,6 +32,8 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin):
version_entity = instance.data.get("versionEntity")
if not version_entity:
continue
current_attributes = version_entity["attrib"]
attributes = instance.data.get("versionAttributes")
if not attributes:
self.log.debug((
@ -45,7 +46,7 @@ class IntegrateVersionAttributes(pyblish.api.ContextPlugin):
for attr, value in attributes.items():
if attr not in available_attributes:
skipped_attributes.add(attr)
else:
elif current_attributes.get(attr) != value:
filtered_attributes[attr] = value
if not filtered_attributes: