Integrator: Enforce unique destination transfers, disallow overwrites in queued transfers (#4662)

* Fix #4656: Enforce unique destination transfer in Integrator

Note that uniqueness is enforced per instance - it does not validate destinations across different instances in the same publish context

* Use explicit DuplicateDestinationError and raise as KnownPublishError
This commit is contained in:
Roy Nieterau 2023-03-21 15:29:19 +01:00 committed by GitHub
parent 6f16f3e4f4
commit 96e184c8ca
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 35 additions and 3 deletions

View file

@ -13,6 +13,16 @@ else:
from shutil import copyfile
class DuplicateDestinationError(ValueError):
    """Raised when a transfer's destination path is already queued.

    Only raised when ``allow_queue_replacements`` is False on the
    ``FileTransaction`` instance and the newly added transfer has a
    different src file than the transfer already queued for that
    destination.
    """
class FileTransaction(object):
"""File transaction with rollback options.
@ -44,7 +54,7 @@ class FileTransaction(object):
MODE_COPY = 0
MODE_HARDLINK = 1
def __init__(self, log=None):
def __init__(self, log=None, allow_queue_replacements=False):
if log is None:
log = logging.getLogger("FileTransaction")
@ -60,6 +70,8 @@ class FileTransaction(object):
# Backup file location mapping to original locations
self._backup_to_original = {}
self._allow_queue_replacements = allow_queue_replacements
def add(self, src, dst, mode=MODE_COPY):
"""Add a new file to transfer queue.
@ -82,6 +94,14 @@ class FileTransaction(object):
src, dst))
return
else:
if not self._allow_queue_replacements:
raise DuplicateDestinationError(
"Transfer to destination is already in queue: "
"{} -> {}. It's not allowed to be replaced by "
"a new transfer from {}".format(
queued_src, dst, src
))
self.log.warning("File transfer in queue replaced..")
self.log.debug(
"Removed from queue: {} -> {} replaced by {} -> {}".format(

View file

@ -24,7 +24,10 @@ from openpype.client import (
get_version_by_name,
)
from openpype.lib import source_hash
from openpype.lib.file_transaction import FileTransaction
from openpype.lib.file_transaction import (
FileTransaction,
DuplicateDestinationError
)
from openpype.pipeline.publish import (
KnownPublishError,
get_publish_template_name,
@ -170,9 +173,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
).format(instance.data["family"]))
return
file_transactions = FileTransaction(log=self.log)
file_transactions = FileTransaction(log=self.log,
# Enforce unique transfers
allow_queue_replacements=False)
try:
self.register(instance, file_transactions, filtered_repres)
except DuplicateDestinationError as exc:
# Raise DuplicateDestinationError as KnownPublishError
# and rollback the transactions
file_transactions.rollback()
six.reraise(KnownPublishError,
KnownPublishError(exc),
sys.exc_info()[2])
except Exception:
# clean destination
# todo: preferably we'd also rollback *any* changes to the database