Merge pull request #2800 from pypeclub/bugfix/OP-2737_Ftrack-circular-dependency-error

Ftrack: Unset task ids from asset versions before tasks are removed
Jakub Trllo 2022-02-24 11:26:33 +01:00 committed by GitHub
commit a9f2d6d2a6
2 changed files with 46 additions and 8 deletions

Changed file 1 of 2 (the DeleteAssetSubset ftrack action)

@@ -3,8 +3,9 @@ import uuid
 from datetime import datetime
 from bson.objectid import ObjectId
-from openpype_modules.ftrack.lib import BaseAction, statics_icon
 from avalon.api import AvalonMongoDB
+from openpype_modules.ftrack.lib import BaseAction, statics_icon
+from openpype_modules.ftrack.lib.avalon_sync import create_chunks
 class DeleteAssetSubset(BaseAction):
@@ -554,8 +555,8 @@ class DeleteAssetSubset(BaseAction):
             ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
         ))
-        entities_by_link_len = (
-            self._filter_entities_to_delete(ftrack_ids_to_delete, session)
+        entities_by_link_len = self._prepare_entities_before_delete(
+            ftrack_ids_to_delete, session
         )
         for link_len in sorted(entities_by_link_len.keys(), reverse=True):
             for entity in entities_by_link_len[link_len]:
@@ -609,7 +610,7 @@ class DeleteAssetSubset(BaseAction):
         return self.report_handle(report_messages, project_name, event)
-    def _filter_entities_to_delete(self, ftrack_ids_to_delete, session):
+    def _prepare_entities_before_delete(self, ftrack_ids_to_delete, session):
         """Filter children entities to avoid CircularDependencyError."""
         joined_ids_to_delete = ", ".join(
             ["\"{}\"".format(id) for id in ftrack_ids_to_delete]
@@ -638,6 +639,21 @@ class DeleteAssetSubset(BaseAction):
                 parent_ids_to_delete.append(entity["id"])
             to_delete_entities.append(entity)
+        # Unset 'task_id' from AssetVersion entities
+        # - when task is deleted the asset version is not marked for deletion
+        task_ids = set(
+            entity["id"]
+            for entity in to_delete_entities
+            if entity.entity_type.lower() == "task"
+        )
+        for chunk in create_chunks(task_ids):
+            asset_versions = session.query((
+                "select id, task_id from AssetVersion where task_id in ({})"
+            ).format(self.join_query_keys(chunk))).all()
+            for asset_version in asset_versions:
+                asset_version["task_id"] = None
+        session.commit()
         entities_by_link_len = collections.defaultdict(list)
         for entity in to_delete_entities:
             entities_by_link_len[len(entity["link"])].append(entity)
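For readers skimming the diff: the new code clears task_id on linked AssetVersion entities and commits that change before the Task entities themselves are deleted, which is how the action avoids the CircularDependencyError mentioned in the method docstring. Below is a minimal standalone sketch of the same order of operations, assuming a configured ftrack_api.Session; the id list and step numbering are illustrative and not taken from this PR.

import ftrack_api

# Assumes FTRACK_SERVER, FTRACK_API_USER and FTRACK_API_KEY are set in the
# environment; the task ids below are placeholders for this sketch.
session = ftrack_api.Session()
task_ids_to_delete = ["<task-id-1>", "<task-id-2>"]
joined_ids = ", ".join('"{}"'.format(task_id) for task_id in task_ids_to_delete)

# 1) Unlink asset versions from the tasks first, in a separate commit, so the
#    server never has to resolve a dependency between entities removed together.
asset_versions = session.query(
    "select id, task_id from AssetVersion where task_id in ({})".format(joined_ids)
).all()
for asset_version in asset_versions:
    asset_version["task_id"] = None
session.commit()

# 2) Only then mark the tasks themselves for deletion.
for task in session.query(
    "select id from Task where id in ({})".format(joined_ids)
).all():
    session.delete(task)
session.commit()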

Changed file 2 of 2 (openpype_modules.ftrack.lib.avalon_sync)

@@ -33,6 +33,30 @@ CURRENT_DOC_SCHEMAS = {
 }
+def create_chunks(iterable, chunk_size=None):
+    """Separate iterable into multiple chunks by size.
+
+    Args:
+        iterable(list|tuple|set): Object that will be separated into chunks.
+        chunk_size(int): Size of one chunk. Default value is 200.
+
+    Returns:
+        list<list>: Chunked items.
+    """
+    chunks = []
+    if not iterable:
+        return chunks
+
+    tupled_iterable = tuple(iterable)
+    iterable_size = len(tupled_iterable)
+    if chunk_size is None:
+        chunk_size = 200
+
+    for idx in range(0, iterable_size, chunk_size):
+        chunks.append(tupled_iterable[idx:idx + chunk_size])
+    return chunks
 def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
     schema_name = "asset-3.0"
     if in_schema:
@@ -1147,10 +1171,8 @@ class SyncEntitiesFactory:
         ids_len = len(tupled_ids)
         chunk_size = int(5000 / ids_len)
         all_links = []
-        for idx in range(0, ids_len, chunk_size):
-            entity_ids_joined = join_query_keys(
-                tupled_ids[idx:idx + chunk_size]
-            )
+        for chunk in create_chunks(ftrack_ids, chunk_size):
+            entity_ids_joined = join_query_keys(chunk)
             all_links.extend(self.session.query((
                 "select from_id, to_id from"