Integrator: Enforce unique destination transfers, disallow overwrites in queued transfers (#4662)

* Fix #4656: Enforce unique destination transfer in Integrator

Note that this is enforced per instance - it does not validate destination paths across instances within the publish context

* Use explicit DuplicateDestinationError and re-raise it as KnownPublishError
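
To illustrate the new behavior, a minimal sketch (the example paths and the FileTransaction.add() call signature are assumptions for illustration, not taken from the diff below): with allow_queue_replacements=False, queuing a second transfer to an already queued destination raises DuplicateDestinationError instead of silently replacing the earlier entry, which the integrator below turns into a KnownPublishError.

    from openpype.lib.file_transaction import (
        FileTransaction,
        DuplicateDestinationError,
    )

    # Disallow replacing queued transfers (new keyword from this change)
    transaction = FileTransaction(log=None, allow_queue_replacements=False)

    # First transfer to this destination is queued as usual
    # (paths and the add() signature are hypothetical)
    transaction.add("/tmp/render_a.exr", "/publish/shot010/render.exr")

    try:
        # A second transfer to the same destination is now rejected
        transaction.add("/tmp/render_b.exr", "/publish/shot010/render.exr")
    except DuplicateDestinationError:
        # Nothing has been processed yet, so rollback is effectively a no-op;
        # this mirrors what IntegrateAsset does before re-raising
        transaction.rollback()
        raise
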
Roy Nieterau authored 2023-03-21 15:29:19 +01:00, committed by GitHub
parent 6f16f3e4f4
commit 96e184c8ca
2 changed files with 35 additions and 3 deletions


@@ -24,7 +24,10 @@ from openpype.client import (
     get_version_by_name,
 )
 from openpype.lib import source_hash
-from openpype.lib.file_transaction import FileTransaction
+from openpype.lib.file_transaction import (
+    FileTransaction,
+    DuplicateDestinationError
+)
 from openpype.pipeline.publish import (
     KnownPublishError,
     get_publish_template_name,
@@ -170,9 +173,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
             ).format(instance.data["family"]))
             return
 
-        file_transactions = FileTransaction(log=self.log)
+        file_transactions = FileTransaction(log=self.log,
+                                            # Enforce unique transfers
+                                            allow_queue_replacements=False)
         try:
             self.register(instance, file_transactions, filtered_repres)
+        except DuplicateDestinationError as exc:
+            # Raise DuplicateDestinationError as KnownPublishError
+            # and rollback the transactions
+            file_transactions.rollback()
+            six.reraise(KnownPublishError,
+                        KnownPublishError(exc),
+                        sys.exc_info()[2])
         except Exception:
             # clean destination
             # todo: preferably we'd also rollback *any* changes to the database
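
The second changed file (openpype/lib/file_transaction.py, per the import path above) is not shown in this excerpt. A rough sketch of how such a queue-replacement guard could look, assuming queued transfers are stored in a dict keyed by destination path; this is an illustration under those assumptions, not the actual diff:

    # Hypothetical sketch only - the real file_transaction.py change is not
    # part of this excerpt. It illustrates one way a queue-replacement guard
    # can raise DuplicateDestinationError.
    class DuplicateDestinationError(ValueError):
        """Raised when a transfer destination is queued more than once."""


    class FileTransaction(object):
        def __init__(self, log=None, allow_queue_replacements=True):
            self.log = log
            self.allow_queue_replacements = allow_queue_replacements
            self._transfers = {}  # destination path -> source path

        def add(self, src, dst):
            """Queue a file transfer, rejecting duplicate destinations."""
            if not self.allow_queue_replacements and dst in self._transfers:
                raise DuplicateDestinationError(
                    "Transfer to destination is already queued: "
                    "{} -> {}".format(src, dst)
                )
            self._transfers[dst] = src
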