diff --git a/pype/modules/sync_server/providers/gdrive.py b/pype/modules/sync_server/providers/gdrive.py index 2207fdf3a3..5bc6f21b38 100644 --- a/pype/modules/sync_server/providers/gdrive.py +++ b/pype/modules/sync_server/providers/gdrive.py @@ -8,6 +8,7 @@ from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload from pype.api import Logger from pype.api import get_system_settings from ..utils import time_function +import time SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly', 'https://www.googleapis.com/auth/drive.file', @@ -42,6 +43,7 @@ class GDriveHandler(AbstractProvider): """ FOLDER_STR = 'application/vnd.google-apps.folder' MY_DRIVE_STR = 'My Drive' # name of root folder of regular Google drive + CHUNK_SIZE = 2097152 # must be divisible by 256! def __init__(self, site_name, tree=None, presets=None): self.presets = None @@ -277,7 +279,9 @@ class GDriveHandler(AbstractProvider): path = new_path_key return folder_id - def upload_file(self, source_path, path, overwrite=False): + def upload_file(self, source_path, path, + server, collection, file, representation, site, + overwrite=False): """ Uploads single file from 'source_path' to destination 'path'. It creates all folders on the path if are not existing. @@ -287,6 +291,13 @@ class GDriveHandler(AbstractProvider): path (string): absolute path with or without name of the file overwrite (boolean): replace existing file + arguments for saving progress: + server (SyncServer): server instance to call update_db on + collection (str): name of collection + file (dict): info about uploaded file (matches structure from db) + representation (dict): complete repre containing 'file' + site (str): site name + Returns: (string) file_id of created/modified file , throws FileExistsError, FileNotFoundError exceptions @@ -302,8 +313,8 @@ class GDriveHandler(AbstractProvider): path = os.path.dirname(path) else: target_name = os.path.basename(source_path) - file = self.file_path_exists(path + "/" + target_name) - if file and not overwrite: + target_file = self.file_path_exists(path + "/" + target_name) + if target_file and not overwrite: raise FileExistsError("File already exists, " "use 'overwrite' argument") @@ -316,23 +327,45 @@ class GDriveHandler(AbstractProvider): } media = MediaFileUpload(source_path, mimetype='application/octet-stream', + chunksize=self.CHUNK_SIZE, resumable=True) try: - if not file: + if not target_file: # update doesnt like parent file_metadata['parents'] = [folder_id] - file = self.service.files().create(body=file_metadata, - supportsAllDrives=True, - media_body=media, - fields='id').execute() + request = self.service.files().create(body=file_metadata, + supportsAllDrives=True, + media_body=media, + fields='id') else: - file = self.service.files().update(fileId=file["id"], - body=file_metadata, - supportsAllDrives=True, - media_body=media, - fields='id').execute() + request = self.service.files().update(fileId=target_file["id"], + body=file_metadata, + supportsAllDrives=True, + media_body=media, + fields='id') + + media.stream() + log.debug("Start Upload! {}".format(source_path)) + last_tick = status = response = None + status_val = 0 + while response is None: + if status: + status_val = float(status.progress()) + if not last_tick or \ + time.time() - last_tick >= server.LOG_PROGRESS_SEC: + last_tick = time.time() + log.debug("Uploaded %d%%." 
% + int(status_val * 100)) + server.update_db(collection=collection, + new_file_id=None, + file=file, + representation=representation, + site=site, + progress=status_val + ) + status, response = request.next_chunk() except errors.HttpError as ex: if ex.resp['status'] == '404': @@ -344,13 +377,14 @@ class GDriveHandler(AbstractProvider): log.warning("Forbidden received, hit quota. " "Injecting 60s delay.") - import time time.sleep(60) return False raise - return file["id"] + return response['id'] - def download_file(self, source_path, local_path, overwrite=False): + def download_file(self, source_path, local_path, + server, collection, file, representation, site, + overwrite=False): """ Downloads single file from 'source_path' (remote) to 'local_path'. It creates all folders on the local_path if are not existing. @@ -361,6 +395,13 @@ class GDriveHandler(AbstractProvider): local_path (string): absolute path with or without name of the file overwrite (boolean): replace existing file + arguments for saving progress: + server (SyncServer): server instance to call update_db on + collection (str): name of collection + file (dict): info about uploaded file (matches structure from db) + representation (dict): complete repre containing 'file' + site (str): site name + Returns: (string) file_id of created/modified file , throws FileExistsError, FileNotFoundError exceptions @@ -378,9 +419,9 @@ class GDriveHandler(AbstractProvider): else: # just folder, get file name from source target_name = os.path.basename(source_path) - file = os.path.isfile(local_path + "/" + target_name) + local_file = os.path.isfile(local_path + "/" + target_name) - if file and not overwrite: + if local_file and not overwrite: raise FileExistsError("File already exists, " "use 'overwrite' argument") @@ -389,9 +430,24 @@ class GDriveHandler(AbstractProvider): with open(local_path + "/" + target_name, "wb") as fh: downloader = MediaIoBaseDownload(fh, request) - done = False - while done is False: - status, done = downloader.next_chunk() + last_tick = status = response = None + status_val = 0 + while response is None: + if status: + status_val = float(status.progress()) + if not last_tick or \ + time.time() - last_tick >= server.LOG_PROGRESS_SEC: + last_tick = time.time() + log.debug("Downloaded %d%%." % + int(status_val * 100)) + server.update_db(collection=collection, + new_file_id=None, + file=file, + representation=representation, + site=site, + progress=status_val + ) + status, response = downloader.next_chunk() return target_name diff --git a/pype/modules/sync_server/providers/resources/gdrive.png b/pype/modules/sync_server/providers/resources/gdrive.png new file mode 100644 index 0000000000..e6c9131454 Binary files /dev/null and b/pype/modules/sync_server/providers/resources/gdrive.png differ diff --git a/pype/modules/sync_server/providers/resources/studio.png b/pype/modules/sync_server/providers/resources/studio.png new file mode 100644 index 0000000000..e95e9762f8 Binary files /dev/null and b/pype/modules/sync_server/providers/resources/studio.png differ diff --git a/pype/modules/sync_server/sync_server.py b/pype/modules/sync_server/sync_server.py index 167be665f5..84637a1d62 100644 --- a/pype/modules/sync_server/sync_server.py +++ b/pype/modules/sync_server/sync_server.py @@ -18,7 +18,7 @@ from .utils import time_function import six from pype.lib import PypeLogger -from .. import PypeModule, ITrayService +from .. 
import PypeModule, ITrayModule if six.PY2: web = asyncio = STATIC_DIR = WebSocketAsync = None @@ -34,7 +34,7 @@ class SyncStatus(Enum): DO_DOWNLOAD = 2 -class SyncServer(PypeModule, ITrayService): +class SyncServer(PypeModule, ITrayModule): """ Synchronization server that is syncing published files from local to any of implemented providers (like GDrive, S3 etc.) @@ -92,6 +92,7 @@ class SyncServer(PypeModule, ITrayService): # set 0 to no limit REPRESENTATION_LIMIT = 100 DEFAULT_SITE = 'studio' + LOG_PROGRESS_SEC = 5 # how often log progress to DB name = "sync_server" label = "Sync Server" @@ -116,6 +117,8 @@ class SyncServer(PypeModule, ITrayService): self.presets = None # settings for all enabled projects for sync self.sync_server_thread = None # asyncio requires new thread + self.action_show_widget = None + def connect_with_modules(self, *_a, **kw): return @@ -131,21 +134,26 @@ class SyncServer(PypeModule, ITrayService): self.presets = None self.lock = threading.Lock() + self.connection = AvalonMongoDB() + self.connection.install() try: self.presets = self.get_synced_presets() self.set_active_sites(self.presets) - self.sync_server_thread = SyncServerThread(self) + from .tray.app import SyncServerWindow + self.widget = SyncServerWindow(self) except ValueError: - log.info("No system setting for sync. Not syncing.") + log.info("No system setting for sync. Not syncing.", exc_info=True) + self.enabled = False except KeyError: log.info(( "There are not set presets for SyncServer OR " "Credentials provided are invalid, " "no syncing possible"). format(str(self.presets)), exc_info=True) + self.enabled = False def tray_start(self): """ @@ -185,6 +193,19 @@ class SyncServer(PypeModule, ITrayService): exc_info=True ) + def tray_menu(self, parent_menu): + if not self.enabled: + return + + from Qt import QtWidgets + """Add menu or action to Tray(or parent)'s menu""" + action = QtWidgets.QAction("SyncServer", parent_menu) + action.triggered.connect(self.show_widget) + parent_menu.addAction(action) + parent_menu.addSeparator() + + self.action_show_widget = action + @property def is_running(self): return self.sync_server_thread.is_running @@ -245,7 +266,8 @@ class SyncServer(PypeModule, ITrayService): settings = get_project_settings(project_name) sync_settings = settings.get("global")["sync_server"] if not sync_settings: - log.info("No project setting for Sync Server, not syncing.") + log.info("No project setting for {}, not syncing.". + format(project_name)) return {} if sync_settings.get("enabled"): return sync_settings @@ -406,8 +428,8 @@ class SyncServer(PypeModule, ITrayService): return SyncStatus.DO_NOTHING - async def upload(self, file, representation, provider_name, site_name, - tree=None, preset=None): + async def upload(self, collection, file, representation, provider_name, + site_name, tree=None, preset=None): """ Upload single 'file' of a 'representation' to 'provider'. Source url is taken from 'file' portion, where {root} placeholder @@ -418,6 +440,7 @@ class SyncServer(PypeModule, ITrayService): from GDrive), 'created_dt' - time of upload Args: + collection (str): source collection file (dictionary): of file from representation in Mongo representation (dictionary): of representation provider_name (string): gdrive, gdc etc. @@ -447,21 +470,28 @@ class SyncServer(PypeModule, ITrayService): err = "Folder {} wasn't created. 
Check permissions.".\ format(target_folder) raise NotADirectoryError(err) - + _, remote_site = self.get_sites_for_project(collection) loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, handler.upload_file, local_file, remote_file, - True) + self, + collection, + file, + representation, + remote_site, + True + ) return file_id - async def download(self, file, representation, provider_name, + async def download(self, collection, file, representation, provider_name, site_name, tree=None, preset=None): """ Downloads file to local folder denoted in representation.Context. Args: + collection (str): source collection file (dictionary) : info about processed file representation (dictionary): repr that 'file' belongs to provider_name (string): 'gdrive' etc @@ -485,26 +515,37 @@ class SyncServer(PypeModule, ITrayService): local_folder = os.path.dirname(local_file) os.makedirs(local_folder, exist_ok=True) + local_site, _ = self.get_sites_for_project(collection) + loop = asyncio.get_running_loop() file_id = await loop.run_in_executor(None, handler.download_file, remote_file, local_file, - False) + False, + self, + collection, + file, + representation, + local_site + ) return file_id - def update_db(self, new_file_id, file, representation, provider_name, - error=None): + def update_db(self, collection, new_file_id, file, representation, + site, error=None, progress=None): """ Update 'provider' portion of records in DB with success (file_id) or error (exception) Args: + collection (string): name of project - force to db connection as + each file might come from different collection new_file_id (string): file (dictionary): info about processed file (pulled from DB) representation (dictionary): parent repr of file (from DB) - provider_name (string): label ('gdrive', 'S3') + site (string): label ('gdrive', 'S3') error (string): exception message + progress (float): 0-1 of progress of upload/download Returns: None @@ -518,26 +559,33 @@ class SyncServer(PypeModule, ITrayService): file_index, _ = self._get_file_info(representation.get('files', []), file_id) site_index, _ = self._get_provider_rec(file.get('sites', []), - provider_name) + site) update = {} if new_file_id: update["$set"] = self._get_success_dict(file_index, site_index, new_file_id) # reset previous errors if any update["$unset"] = self._get_error_dict(file_index, site_index, - "", "") + "", "", "") + elif progress is not None: + update["$set"] = self._get_progress_dict(file_index, site_index, + progress) else: - tries = self._get_tries_count(file, provider_name) + tries = self._get_tries_count(file, site) tries += 1 update["$set"] = self._get_error_dict(file_index, site_index, error, tries) + self.connection.Session["AVALON_PROJECT"] = collection self.connection.update_one( query, update ) + if progress is not None: + return + status = 'failed' error_str = 'with error {}'.format(error) if new_file_id: @@ -553,7 +601,7 @@ class SyncServer(PypeModule, ITrayService): def _get_file_info(self, files, _id): """ Return record from list of records which name matches to 'provider' - Could be possibly refactored with '_get_file_info' together. + Could be possibly refactored with '_get_provider_rec' together. 
Args: files (list): of dictionaries with info about published files @@ -590,7 +638,7 @@ class SyncServer(PypeModule, ITrayService): return -1, None def reset_provider_for_file(self, collection, representation_id, - file_id, site_name): + file_id, side): """ Reset information about synchronization for particular 'file_id' and provider. @@ -599,7 +647,7 @@ class SyncServer(PypeModule, ITrayService): collection (string): name of project (eg. collection) in DB representation_id(string): _id of representation file_id (string): file _id in representation - site_name (string): 'gdrive', 'S3' etc + side (string): local or remote side Returns: None """ @@ -607,19 +655,25 @@ class SyncServer(PypeModule, ITrayService): query = { "_id": ObjectId(representation_id) } - self.connection.Session["AVALON_PROJECT"] = collection - representation = list(self.connection.find(query)) + + representation = list(self.connection.database[collection].find(query)) if not representation: raise ValueError("Representation {} not found in {}". format(representation_id, collection)) + local_site, remote_site = self.get_sites_for_project(collection) + if side == 'local': + site_name = local_site + else: + site_name = remote_site + files = representation[0].get('files', []) file_index, _ = self._get_file_info(files, file_id) site_index, _ = self._get_provider_rec(files[file_index]. get('sites', []), site_name) - if file_index > 0 and site_index > 0: + if file_index >= 0 and site_index >= 0: elem = {"name": site_name} update = { "$set": {"files.{}.sites.{}".format(file_index, site_index): @@ -627,7 +681,7 @@ class SyncServer(PypeModule, ITrayService): } } - self.connection.update_one( + self.connection.database[collection].update_one( query, update ) @@ -641,6 +695,10 @@ class SyncServer(PypeModule, ITrayService): """ return int(self.presets[project_name]["config"]["loop_delay"]) + def show_widget(self): + """Show dialog to enter credentials""" + self.widget.show() + def _get_success_dict(self, file_index, site_index, new_file_id): """ Provide success metadata ("id", "created_dt") to be stored in Db. @@ -660,7 +718,8 @@ class SyncServer(PypeModule, ITrayService): datetime.utcnow()} return val - def _get_error_dict(self, file_index, site_index, error="", tries=""): + def _get_error_dict(self, file_index, site_index, + error="", tries="", progress=""): """ Provide error metadata to be stored in Db. Used for set (error and tries provided) or unset mode. @@ -675,7 +734,9 @@ class SyncServer(PypeModule, ITrayService): val = {"files.{}.sites.{}.last_failed_dt". format(file_index, site_index): datetime.utcnow(), "files.{}.sites.{}.error".format(file_index, site_index): error, - "files.{}.sites.{}.tries".format(file_index, site_index): tries + "files.{}.sites.{}.tries".format(file_index, site_index): tries, + "files.{}.sites.{}.progress".format(file_index, site_index): + progress } return val @@ -703,6 +764,22 @@ class SyncServer(PypeModule, ITrayService): _, rec = self._get_provider_rec(file.get("sites", []), provider) return rec.get("tries", 0) + def _get_progress_dict(self, file_index, site_index, progress): + """ + Provide progress metadata to be stored in Db. + Used during upload/download for GUI to show. + Args: + file_index: (int) - index of modified file + site_index: (int) - index of modified site of modified file + progress: (float) - 0-1 progress of upload/download + Returns: + (dictionary) + """ + val = {"files.{}.sites.{}.progress". 
+ format(file_index, site_index): progress + } + return val + def _get_local_file_path(self, file, local_root): """ Auxiliary function for replacing rootless path with real path @@ -848,23 +925,27 @@ class SyncServerThread(threading.Thread): tree = handler.get_tree() limit -= 1 task = asyncio.create_task( - self.module.upload(file, + self.module.upload(collection, + file, sync, provider, site, tree, site_preset)) task_files_to_process.append(task) - # store info for exception handling + # store info for exception handlingy files_processed_info.append((file, sync, - site)) + site, + collection + )) processed_file_path.add(file_path) if status == SyncStatus.DO_DOWNLOAD: tree = handler.get_tree() limit -= 1 task = asyncio.create_task( - self.module.download(file, + self.module.download(collection, + file, sync, provider, site, @@ -874,7 +955,9 @@ class SyncServerThread(threading.Thread): files_processed_info.append((file, sync, - local)) + local, + collection + )) processed_file_path.add(file_path) log.debug("Sync tasks count {}". @@ -884,12 +967,13 @@ class SyncServerThread(threading.Thread): return_exceptions=True) for file_id, info in zip(files_created, files_processed_info): - file, representation, site = info + file, representation, site, collection = info error = None if isinstance(file_id, BaseException): error = str(file_id) file_id = None - self.module.update_db(file_id, + self.module.update_db(collection, + file_id, file, representation, site, diff --git a/pype/modules/sync_server/tray/app.py b/pype/modules/sync_server/tray/app.py new file mode 100644 index 0000000000..afd103f9d5 --- /dev/null +++ b/pype/modules/sync_server/tray/app.py @@ -0,0 +1,1509 @@ +from Qt import QtWidgets, QtCore, QtGui +from Qt.QtCore import Qt +from pype.tools.settings.settings.widgets.base import ProjectListWidget +import attr +import os +from pype.tools.settings.settings import style +from avalon.tools.delegates import PrettyTimeDelegate, pretty_timestamp + +from pype.lib import PypeLogger + +import json + +log = PypeLogger().get_logger("SyncServer") + +STATUS = { + 0: 'In Progress', + 1: 'Failed', + 2: 'Queued', + 3: 'Paused', + 4: 'Synced OK', + -1: 'Not available' +} + + +class SyncServerWindow(QtWidgets.QDialog): + """ + Main window that contains list of synchronizable projects and summary + view with all synchronizable representations for first project + """ + + def __init__(self, sync_server, parent=None): + super(SyncServerWindow, self).__init__(parent) + self.setWindowFlags(QtCore.Qt.Window) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self.setStyleSheet(style.load_stylesheet()) + self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) + self.resize(1400, 800) + + body = QtWidgets.QWidget(self) + footer = QtWidgets.QWidget(self) + footer.setFixedHeight(20) + + container = QtWidgets.QWidget() + projects = SyncProjectListWidget(sync_server, self) + projects.refresh() # force selection of default + repres = SyncRepresentationWidget(sync_server, + project=projects.current_project, + parent=self) + + container_layout = QtWidgets.QHBoxLayout(container) + container_layout.setContentsMargins(0, 0, 0, 0) + split = QtWidgets.QSplitter() + split.addWidget(projects) + split.addWidget(repres) + split.setSizes([180, 950, 200]) + container_layout.addWidget(split) + + container.setLayout(container_layout) + + body_layout = QtWidgets.QHBoxLayout(body) + body_layout.addWidget(container) + body_layout.setContentsMargins(0, 0, 0, 0) + + message = QtWidgets.QLabel(footer) + message.hide() + + footer_layout = 
QtWidgets.QVBoxLayout(footer) + footer_layout.addWidget(message) + footer_layout.setContentsMargins(0, 0, 0, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(body) + layout.addWidget(footer) + + self.setLayout(body_layout) + self.setWindowTitle("Sync Server") + + projects.project_changed.connect( + lambda: repres.table_view.model().set_project( + projects.current_project)) + + +class SyncProjectListWidget(ProjectListWidget): + """ + Lists all projects that are synchronized to choose from + """ + + def __init__(self, sync_server, parent): + super(SyncProjectListWidget, self).__init__(parent) + self.sync_server = sync_server + + def validate_context_change(self): + return True + + def refresh(self): + model = self.project_list.model() + model.clear() + + for project_name in self.sync_server.get_synced_presets().keys(): + model.appendRow(QtGui.QStandardItem(project_name)) + + if len(self.sync_server.get_synced_presets().keys()) == 0: + model.appendRow(QtGui.QStandardItem("No project configured")) + + self.current_project = self.project_list.currentIndex().data( + QtCore.Qt.DisplayRole + ) + if not self.current_project: + self.current_project = self.project_list.model().item(0). \ + data(QtCore.Qt.DisplayRole) + + +class SyncRepresentationWidget(QtWidgets.QWidget): + """ + Summary dialog with list of representations that matches current + settings 'local_site' and 'remote_site'. + """ + active_changed = QtCore.Signal() # active index changed + + default_widths = ( + ("asset", 210), + ("subset", 190), + ("version", 10), + ("representation", 90), + ("created_dt", 100), + ("sync_dt", 100), + ("local_site", 60), + ("remote_site", 70), + ("files_count", 70), + ("files_size", 70), + ("priority", 20), + ("state", 50) + ) + + def __init__(self, sync_server, project=None, parent=None): + super(SyncRepresentationWidget, self).__init__(parent) + + self.sync_server = sync_server + + self._selected_id = None # keep last selected _id + + self.filter = QtWidgets.QLineEdit() + self.filter.setPlaceholderText("Filter representations..") + + top_bar_layout = QtWidgets.QHBoxLayout() + top_bar_layout.addWidget(self.filter) + + self.table_view = QtWidgets.QTableView() + headers = [item[0] for item in self.default_widths] + + model = SyncRepresentationModel(sync_server, headers, project) + self.table_view.setModel(model) + self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + self.table_view.setSelectionMode( + QtWidgets.QAbstractItemView.SingleSelection) + self.table_view.setSelectionBehavior( + QtWidgets.QAbstractItemView.SelectRows) + self.table_view.horizontalHeader().setSortIndicator( + -1, Qt.AscendingOrder) + self.table_view.setSortingEnabled(True) + self.table_view.setAlternatingRowColors(True) + self.table_view.verticalHeader().hide() + + time_delegate = PrettyTimeDelegate(self) + column = self.table_view.model().get_header_index("created_dt") + self.table_view.setItemDelegateForColumn(column, time_delegate) + column = self.table_view.model().get_header_index("sync_dt") + self.table_view.setItemDelegateForColumn(column, time_delegate) + + column = self.table_view.model().get_header_index("local_site") + delegate = ImageDelegate(self) + self.table_view.setItemDelegateForColumn(column, delegate) + + column = self.table_view.model().get_header_index("remote_site") + delegate = ImageDelegate(self) + self.table_view.setItemDelegateForColumn(column, delegate) + + column = self.table_view.model().get_header_index("files_size") + delegate = SizeDelegate(self) + 
self.table_view.setItemDelegateForColumn(column, delegate) + + for column_name, width in self.default_widths: + idx = model.get_header_index(column_name) + self.table_view.setColumnWidth(idx, width) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addLayout(top_bar_layout) + layout.addWidget(self.table_view) + + self.table_view.doubleClicked.connect(self._double_clicked) + self.filter.textChanged.connect(lambda: model.set_filter( + self.filter.text())) + self.table_view.customContextMenuRequested.connect( + self._on_context_menu) + + self.table_view.model().modelReset.connect(self._set_selection) + + self.selection_model = self.table_view.selectionModel() + self.selection_model.selectionChanged.connect(self._selection_changed) + + def _selection_changed(self, new_selection): + index = self.selection_model.currentIndex() + self._selected_id = self.table_view.model().data(index, Qt.UserRole) + + def _set_selection(self): + """ + Sets selection to 'self._selected_id' if exists. + + Keep selection during model refresh. + """ + if self._selected_id: + index = self.table_view.model().get_index(self._selected_id) + if index and index.isValid(): + mode = QtCore.QItemSelectionModel.Select | \ + QtCore.QItemSelectionModel.Rows + self.selection_model.setCurrentIndex(index, mode) + else: + self._selected_id = None + + def _double_clicked(self, index): + """ + Opens representation dialog with all files after doubleclick + """ + _id = self.table_view.model().data(index, Qt.UserRole) + detail_window = SyncServerDetailWindow( + self.sync_server, _id, self.table_view.model()._project) + detail_window.exec() + + def _on_context_menu(self, point): + """ + Shows menu with loader actions on Right-click. + """ + point_index = self.table_view.indexAt(point) + if not point_index.isValid(): + return + + +class SyncRepresentationModel(QtCore.QAbstractTableModel): + PAGE_SIZE = 19 + REFRESH_SEC = 5000 + DEFAULT_SORT = { + "updated_dt_remote": -1, + "_id": 1 + } + SORT_BY_COLUMN = [ + "context.asset", # asset + "context.subset", # subset + "context.version", # version + "context.representation", # representation + "updated_dt_local", # local created_dt + "updated_dt_remote", # remote created_dt + "avg_progress_local", # local progress + "avg_progress_remote", # remote progress + "files_count", # count of files + "files_size", # file size of all files + "context.asset", # priority TODO + "status" # state + ] + + numberPopulated = QtCore.Signal(int) + + @attr.s + class SyncRepresentation: + """ + Auxiliary object for easier handling. + + Fields must contain all header values (+ any arbitrary values). 
+ """ + _id = attr.ib() + asset = attr.ib() + subset = attr.ib() + version = attr.ib() + representation = attr.ib() + created_dt = attr.ib(default=None) + sync_dt = attr.ib(default=None) + local_site = attr.ib(default=None) + remote_site = attr.ib(default=None) + files_count = attr.ib(default=None) + files_size = attr.ib(default=None) + priority = attr.ib(default=None) + state = attr.ib(default=None) + + def __init__(self, sync_server, header, project=None): + super(SyncRepresentationModel, self).__init__() + self._header = header + self._data = [] + self._project = project + self._rec_loaded = 0 + self._buffer = [] # stash one page worth of records (actually cursor) + self.filter = None + + self._initialized = False + + self.sync_server = sync_server + # TODO think about admin mode + # this is for regular user, always only single local and single remote + self.local_site, self.remote_site = \ + self.sync_server.get_sites_for_project(self._project) + + self.projection = self.get_default_projection() + + self.sort = self.DEFAULT_SORT + + self.query = self.get_default_query() + self.default_query = list(self.get_default_query()) + log.debug("!!! init query: {}".format(json.dumps(self.query, + indent=4))) + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + self.timer = QtCore.QTimer() + self.timer.timeout.connect(self.tick) + self.timer.start(self.REFRESH_SEC) + + @property + def dbcon(self): + return self.sync_server.connection.database[self._project] + + def data(self, index, role): + item = self._data[index.row()] + + if role == Qt.DisplayRole: + return attr.asdict(item)[self._header[index.column()]] + if role == Qt.UserRole: + return item._id + + def rowCount(self, index): + return len(self._data) + + def columnCount(self, index): + return len(self._header) + + def headerData(self, section, orientation, role): + if role == Qt.DisplayRole: + if orientation == Qt.Horizontal: + return str(self._header[section]) + + def tick(self): + self.refresh(representations=None, load_records=self._rec_loaded) + self.timer.start(self.REFRESH_SEC) + + def get_header_index(self, value): + """ + Returns index of 'value' in headers + + Args: + value (str): header name value + Returns: + (int) + """ + return self._header.index(value) + + def refresh(self, representations=None, load_records=0): + self.beginResetModel() + self._data = [] + self._rec_loaded = 0 + + if not representations: + self.query = self.get_default_query(load_records) + representations = self.dbcon.aggregate(self.query) + + self._add_page_records(self.local_site, self.remote_site, + representations) + self.endResetModel() + + def _add_page_records(self, local_site, remote_site, representations): + for repre in representations: + context = repre.get("context").pop() + files = repre.get("files", []) + if isinstance(files, dict): # aggregate returns dictionary + files = [files] + + # representation without files doesnt concern us + if not files: + continue + + local_updated = remote_updated = None + if repre.get('updated_dt_local'): + local_updated = \ + repre.get('updated_dt_local').strftime("%Y%m%dT%H%M%SZ") + + if repre.get('updated_dt_remote'): + remote_updated = \ + repre.get('updated_dt_remote').strftime("%Y%m%dT%H%M%SZ") + + avg_progress_remote = repre.get('avg_progress_remote', '') + avg_progress_local = repre.get('avg_progress_local', '') + + item = self.SyncRepresentation( + repre.get("_id"), + context.get("asset"), + context.get("subset"), + "v{:0>3d}".format(context.get("version", 1)), + 
context.get("representation"), + local_updated, + remote_updated, + '{} {}'.format(local_site, avg_progress_local), + '{} {}'.format(remote_site, avg_progress_remote), + repre.get("files_count", 1), + repre.get("files_size", 0), + 1, + STATUS[repre.get("status", -1)] + ) + + self._data.append(item) + self._rec_loaded += 1 + + def canFetchMore(self, index): + """ + Check if there are more records than currently loaded + """ + # 'skip' might be suboptimal when representation hits 500k+ + self._buffer = list(self.dbcon.aggregate(self.query)) + # log.info("!!! canFetchMore _rec_loaded::{} - {}".format( + # self._rec_loaded, len(self._buffer))) + return len(self._buffer) > self._rec_loaded + + def fetchMore(self, index): + """ + Add more record to model. + + Called when 'canFetchMore' returns true, which means there are + more records in DB than loaded. + 'self._buffer' is used to stash cursor to limit requery + """ + log.debug("fetchMore") + # cursor.count() returns always total number, not only skipped + limit + remainder = len(self._buffer) - self._rec_loaded + items_to_fetch = min(self.PAGE_SIZE, remainder) + self.beginInsertRows(index, + self._rec_loaded, + self._rec_loaded + items_to_fetch - 1) + + self._add_page_records(self.local_site, self.remote_site, self._buffer) + + self.endInsertRows() + + self.numberPopulated.emit(items_to_fetch) # ?? + + def sort(self, index, order): + """ + Summary sort per representation. + + Sort is happening on a DB side, model is reset, db queried + again. + + Args: + index (int): column index + order (int): 0| + """ + # limit unwanted first re-sorting by view + if index < 0: + return + + self._rec_loaded = 0 + if order == 0: + order = 1 + else: + order = -1 + + self.sort = {self.SORT_BY_COLUMN[index]: order, '_id': 1} + self.query = self.get_default_query() + + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + def set_filter(self, filter): + """ + Adds text value filtering + + Args: + filter (str): string inputted by user + """ + self.filter = filter + self.refresh() + + def set_project(self, project): + """ + Changes project, called after project selection is changed + + Args: + project (str): name of project + """ + self._project = project + self.refresh() + + def get_index(self, id): + """ + Get index of 'id' value. + + Used for keeping selection after refresh. + + Args: + id (str): MongoDB _id + Returns: + (QModelIndex) + """ + index = None + for i in range(self.rowCount(None)): + index = self.index(i, 0) + value = self.data(index, Qt.UserRole) + if value == id: + return index + return index + + def get_default_query(self, limit=0): + """ + Returns basic aggregate query for main table. + + Main table provides summary information about representation, + which could have multiple files. Details are accessible after + double click on representation row. + Columns: + 'created_dt' - max of created or updated (when failed) per repr + 'sync_dt' - same for remote side + 'local_site' - progress of repr on local side, 1 = finished + 'remote_site' - progress on remote side, calculates from files + 'state' - + 0 - in progress + 1 - failed + 2 - queued + 3 - paused (not implemented yet) + 4 - finished on both sides + + are calculated and must be calculated in DB because of + pagination + + Args: + limit (int): how many records should be returned, by default + it 'PAGE_SIZE' for performance. 
+ Should be overridden by value of loaded records for refresh + functionality (got more records by scrolling, refresh + shouldn't reset that) + """ + if limit == 0: + limit = SyncRepresentationModel.PAGE_SIZE + + return [ + {"$match": self._get_match_part()}, + {'$unwind': '$files'}, + # merge potentially unwinded records back to single per repre + {'$addFields': { + 'order_remote': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.remote_site]} + }}, + 'order_local': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.local_site]} + }} + }}, + {'$addFields': { + # prepare progress per file, presence of 'created_dt' denotes + # successfully finished load/download + 'progress_remote': {'$first': { + '$cond': [{'$size': "$order_remote.progress"}, + "$order_remote.progress", + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}}, + 'progress_local': {'$first': { + '$cond': [{'$size': "$order_local.progress"}, + "$order_local.progress", + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}}, + # file might be successfully created or failed, not both + 'updated_dt_remote': {'$first': { + '$cond': [{'$size': "$order_remote.created_dt"}, + "$order_remote.created_dt", + {'$cond': [ + {'$size': "$order_remote.last_failed_dt"}, + "$order_remote.last_failed_dt", + [] + ]} + ]}}, + 'updated_dt_local': {'$first': { + '$cond': [{'$size': "$order_local.created_dt"}, + "$order_local.created_dt", + {'$cond': [ + {'$size': "$order_local.last_failed_dt"}, + "$order_local.last_failed_dt", + [] + ]} + ]}}, + 'files_size': {'$ifNull': ["$files.size", 0]}, + 'failed_remote': { + '$cond': [{'$size': "$order_remote.last_failed_dt"}, + 1, + 0]}, + 'failed_local': { + '$cond': [{'$size': "$order_local.last_failed_dt"}, + 1, + 0]} + }}, + {'$group': { + '_id': '$_id', + # pass through context - same for representation + 'context': {'$addToSet': '$context'}, + # pass through files as a list + 'files': {'$addToSet': '$files'}, + # count how many files + 'files_count': {'$sum': 1}, + 'files_size': {'$sum': '$files_size'}, + # sum avg progress, finished = 1 + 'avg_progress_remote': {'$avg': "$progress_remote"}, + 'avg_progress_local': {'$avg': "$progress_local"}, + # select last touch of file + 'updated_dt_remote': {'$max': "$updated_dt_remote"}, + 'failed_remote': {'$sum': '$failed_remote'}, + 'failed_local': {'$sum': '$failed_local'}, + 'updated_dt_local': {'$max': "$updated_dt_local"} + }}, + {"$limit": limit}, + {"$skip": self._rec_loaded}, + {"$project": self.projection}, + {"$sort": self.sort} + ] + + def _get_match_part(self): + """ + Extend match part with filter if present. + + Filter is set by user input. Each model has different fields to be + checked. + If performance issues are found, '$text' and text indexes should + be investigated. 
+ """ + if not self.filter: + return { + "type": "representation", + 'files.sites': { + '$elemMatch': { + '$or': [ + {'name': self.local_site}, + {'name': self.remote_site} + ] + } + } + } + else: + regex_str = '.*{}.*'.format(self.filter) + return { + "type": "representation", + '$or': [ + {'context.subset': {'$regex': regex_str, '$options': 'i'}}, + {'context.asset': {'$regex': regex_str, '$options': 'i'}}, + {'context.representation': {'$regex': regex_str, + '$options': 'i'}}], + 'files.sites': { + '$elemMatch': { + '$or': [ + {'name': self.local_site}, + {'name': self.remote_site} + ] + } + } + } + + def get_default_projection(self): + """ + Projection part for aggregate query. + + All fields with '1' will be returned, no others. + + Returns: + (dict) + """ + return { + "context.subset": 1, + "context.asset": 1, + "context.version": 1, + "context.representation": 1, + "files": 1, + 'files_count': 1, + "files_size": 1, + 'avg_progress_remote': 1, + 'avg_progress_local': 1, + 'updated_dt_remote': 1, + 'updated_dt_local': 1, + 'status': { + '$switch': { + 'branches': [ + { + 'case': { + '$or': [{'$eq': ['$avg_progress_remote', 0]}, + {'$eq': ['$avg_progress_local', 0]}]}, + 'then': 2 # Queued + }, + { + 'case': { + '$or': ['$failed_remote', '$failed_local']}, + 'then': 1 # Failed + }, + { + 'case': {'$or': [{'$and': [ + {'$gt': ['$avg_progress_remote', 0]}, + {'$lt': ['$avg_progress_remote', 1]} + ]}, + {'$and': [ + {'$gt': ['$avg_progress_local', 0]}, + {'$lt': ['$avg_progress_local', 1]} + ]} + ]}, + 'then': 0 # In progress + }, + { + 'case': {'$eq': ['dummy_placeholder', 'paused']}, + 'then': 3 # Paused + }, + { + 'case': {'$and': [ + {'$eq': ['$avg_progress_remote', 1]}, + {'$eq': ['$avg_progress_local', 1]} + ]}, + 'then': 4 # Synced OK + }, + ], + 'default': -1 + } + } + } + + +class SyncServerDetailWindow(QtWidgets.QDialog): + def __init__(self, sync_server, _id, project, parent=None): + log.debug( + "!!! SyncServerDetailWindow _id:: {}".format(_id)) + super(SyncServerDetailWindow, self).__init__(parent) + self.setWindowFlags(QtCore.Qt.Window) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self.setStyleSheet(style.load_stylesheet()) + self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) + self.resize(1000, 400) + + body = QtWidgets.QWidget() + footer = QtWidgets.QWidget() + footer.setFixedHeight(20) + + container = SyncRepresentationDetailWidget(sync_server, _id, project, + parent=self) + body_layout = QtWidgets.QHBoxLayout(body) + body_layout.addWidget(container) + body_layout.setContentsMargins(0, 0, 0, 0) + + message = QtWidgets.QLabel() + message.hide() + + footer_layout = QtWidgets.QVBoxLayout(footer) + footer_layout.addWidget(message) + footer_layout.setContentsMargins(0, 0, 0, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(body) + layout.addWidget(footer) + + self.setLayout(body_layout) + self.setWindowTitle("Sync Representation Detail") + + +class SyncRepresentationDetailWidget(QtWidgets.QWidget): + """ + Widget to display list of synchronizable files for single repre. 
+ + Args: + _id (str): representation _id + project (str): name of project with repre + parent (QDialog): SyncServerDetailWindow + """ + active_changed = QtCore.Signal() # active index changed + + default_widths = ( + ("file", 290), + ("created_dt", 120), + ("sync_dt", 120), + ("local_site", 60), + ("remote_site", 60), + ("size", 60), + ("priority", 20), + ("state", 90) + ) + + def __init__(self, sync_server, _id=None, project=None, parent=None): + super(SyncRepresentationDetailWidget, self).__init__(parent) + + self.representation_id = _id + self.item = None # set to item that mouse was clicked over + + self.sync_server = sync_server + + self._selected_id = None + + self.filter = QtWidgets.QLineEdit() + self.filter.setPlaceholderText("Filter representation..") + + top_bar_layout = QtWidgets.QHBoxLayout() + top_bar_layout.addWidget(self.filter) + + self.table_view = QtWidgets.QTableView() + headers = [item[0] for item in self.default_widths] + + model = SyncRepresentationDetailModel(sync_server, headers, _id, + project) + self.table_view.setModel(model) + self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + self.table_view.setSelectionMode( + QtWidgets.QAbstractItemView.SingleSelection) + self.table_view.setSelectionBehavior( + QtWidgets.QTableView.SelectRows) + self.table_view.horizontalHeader().setSortIndicator(-1, + Qt.AscendingOrder) + self.table_view.setSortingEnabled(True) + self.table_view.setAlternatingRowColors(True) + self.table_view.verticalHeader().hide() + + time_delegate = PrettyTimeDelegate(self) + column = self.table_view.model().get_header_index("created_dt") + self.table_view.setItemDelegateForColumn(column, time_delegate) + column = self.table_view.model().get_header_index("sync_dt") + self.table_view.setItemDelegateForColumn(column, time_delegate) + + column = self.table_view.model().get_header_index("local_site") + delegate = ImageDelegate(self) + self.table_view.setItemDelegateForColumn(column, delegate) + + column = self.table_view.model().get_header_index("remote_site") + delegate = ImageDelegate(self) + self.table_view.setItemDelegateForColumn(column, delegate) + + column = self.table_view.model().get_header_index("size") + delegate = SizeDelegate(self) + self.table_view.setItemDelegateForColumn(column, delegate) + + for column_name, width in self.default_widths: + idx = model.get_header_index(column_name) + self.table_view.setColumnWidth(idx, width) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addLayout(top_bar_layout) + layout.addWidget(self.table_view) + + self.filter.textChanged.connect(lambda: model.set_filter( + self.filter.text())) + self.table_view.customContextMenuRequested.connect( + self._on_context_menu) + + self.table_view.model().modelReset.connect(self._set_selection) + + self.selection_model = self.table_view.selectionModel() + self.selection_model.selectionChanged.connect(self._selection_changed) + + def _selection_changed(self): + index = self.selection_model.currentIndex() + self._selected_id = self.table_view.model().data(index, Qt.UserRole) + + def _set_selection(self): + """ + Sets selection to 'self._selected_id' if exists. + + Keep selection during model refresh. 
+ """ + if self._selected_id: + index = self.table_view.model().get_index(self._selected_id) + if index.isValid(): + mode = QtCore.QItemSelectionModel.Select | \ + QtCore.QItemSelectionModel.Rows + self.selection_model.setCurrentIndex(index, mode) + else: + self._selected_id = None + + def _show_detail(self): + """ + Shows windows with error message for failed sync of a file. + """ + dt = max(self.item.created_dt, self.item.sync_dt) + detail_window = SyncRepresentationErrorWindow(self.item._id, + self.project, + dt, + self.item.tries, + self.item.error) + detail_window.exec() + + def _on_context_menu(self, point): + """ + Shows menu with loader actions on Right-click. + """ + point_index = self.table_view.indexAt(point) + if not point_index.isValid(): + return + + self.item = self.table_view.model()._data[point_index.row()] + + menu = QtWidgets.QMenu() + actions_mapping = {} + + if self.item.state == STATUS[1]: + action = QtWidgets.QAction("Open error detail") + actions_mapping[action] = self._show_detail + menu.addAction(action) + + remote_site, remote_progress = self.item.remote_site.split() + if remote_progress == '1': + action = QtWidgets.QAction("Reset local site") + actions_mapping[action] = self._reset_local_site + menu.addAction(action) + + local_site, local_progress = self.item.local_site.split() + if local_progress == '1': + action = QtWidgets.QAction("Reset remote site") + actions_mapping[action] = self._reset_remote_site + menu.addAction(action) + + if not actions_mapping: + action = QtWidgets.QAction("< No action >") + actions_mapping[action] = None + menu.addAction(action) + + result = menu.exec_(QtGui.QCursor.pos()) + if result: + to_run = actions_mapping[result] + if to_run: + to_run() + + def _reset_local_site(self): + """ + Removes errors or success metadata for particular file >> forces + redo of upload/download + """ + self.sync_server.reset_provider_for_file( + self.table_view.model()._project, + self.representation_id, + self.item._id, + 'local') + + def _reset_remote_site(self): + """ + Removes errors or success metadata for particular file >> forces + redo of upload/download + """ + self.sync_server.reset_provider_for_file( + self.table_view.model()._project, + self.representation_id, + self.item._id, + 'remote') + + +class SyncRepresentationDetailModel(QtCore.QAbstractTableModel): + """ + List of all syncronizable files per single representation. + """ + PAGE_SIZE = 30 + # TODO add filter filename + DEFAULT_SORT = { + "files.path": 1 + } + SORT_BY_COLUMN = [ + "files.path", + "updated_dt_local", # local created_dt + "updated_dt_remote", # remote created_dt + "progress_local", # local progress + "progress_remote", # remote progress + "size", # remote progress + "context.asset", # priority TODO + "status" # state + ] + + @attr.s + class SyncRepresentationDetail: + """ + Auxiliary object for easier handling. + + Fields must contain all header values (+ any arbitrary values). 
+ """ + _id = attr.ib() + file = attr.ib() + created_dt = attr.ib(default=None) + sync_dt = attr.ib(default=None) + local_site = attr.ib(default=None) + remote_site = attr.ib(default=None) + size = attr.ib(default=None) + priority = attr.ib(default=None) + state = attr.ib(default=None) + tries = attr.ib(default=None) + error = attr.ib(default=None) + + def __init__(self, sync_server, header, _id, project=None): + super(SyncRepresentationDetailModel, self).__init__() + self._header = header + self._data = [] + self._project = project + self._rec_loaded = 0 + self.filter = None + self._buffer = [] # stash one page worth of records (actually cursor) + self._id = _id + self._initialized = False + + self.sync_server = sync_server + # TODO think about admin mode + # this is for regular user, always only single local and single remote + self.local_site, self.remote_site = \ + self.sync_server.get_sites_for_project(self._project) + + self.sort = self.DEFAULT_SORT + + # in case we would like to hide/show some columns + self.projection = self.get_default_projection() + + self.query = self.get_default_query() + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + self.timer = QtCore.QTimer() + self.timer.timeout.connect(self.tick) + self.timer.start(SyncRepresentationModel.REFRESH_SEC) + + @property + def dbcon(self): + return self.sync_server.connection.database[self._project] + + def tick(self): + self.refresh(representations=None, load_records=self._rec_loaded) + self.timer.start(SyncRepresentationModel.REFRESH_SEC) + + def get_header_index(self, value): + """ + Returns index of 'value' in headers + + Args: + value (str): header name value + Returns: + (int) + """ + return self._header.index(value) + + def data(self, index, role): + item = self._data[index.row()] + if role == Qt.DisplayRole: + return attr.asdict(item)[self._header[index.column()]] + if role == Qt.UserRole: + return item._id + + def rowCount(self, index): + return len(self._data) + + def columnCount(self, index): + return len(self._header) + + def headerData(self, section, orientation, role): + if role == Qt.DisplayRole: + if orientation == Qt.Horizontal: + return str(self._header[section]) + + def refresh(self, representations=None, load_records=0): + self.beginResetModel() + self._data = [] + self._rec_loaded = 0 + + if not representations: + self.query = self.get_default_query(load_records) + representations = self.dbcon.aggregate(self.query) + + self._add_page_records(self.local_site, self.remote_site, + representations) + self.endResetModel() + + def _add_page_records(self, local_site, remote_site, representations): + """ + Process all records from 'representation' and add them to storage. + + Args: + local_site (str): name of local site (mine) + remote_site (str): name of cloud provider (theirs) + representations (Mongo Cursor) + """ + for repre in representations: + # log.info("!!! 
repre:: {}".format(repre)) + files = repre.get("files", []) + if isinstance(files, dict): # aggregate returns dictionary + files = [files] + + for file in files: + local_updated = remote_updated = None + if repre.get('updated_dt_local'): + local_updated = \ + repre.get('updated_dt_local').strftime( + "%Y%m%dT%H%M%SZ") + + if repre.get('updated_dt_remote'): + remote_updated = \ + repre.get('updated_dt_remote').strftime( + "%Y%m%dT%H%M%SZ") + + progress_remote = repre.get('progress_remote', '') + progress_local = repre.get('progress_local', '') + + errors = [] + if repre.get('failed_remote_error'): + errors.append(repre.get('failed_remote_error')) + if repre.get('failed_local_error'): + errors.append(repre.get('failed_local_error')) + + item = self.SyncRepresentationDetail( + file.get("_id"), + os.path.basename(file["path"]), + local_updated, + remote_updated, + '{} {}'.format(local_site, progress_local), + '{} {}'.format(remote_site, progress_remote), + file.get('size', 0), + 1, + STATUS[repre.get("status", -1)], + repre.get("tries"), + '\n'.join(errors) + ) + self._data.append(item) + self._rec_loaded += 1 + + def canFetchMore(self, index): + """ + Check if there are more records than currently loaded + """ + # 'skip' might be suboptimal when representation hits 500k+ + self._buffer = list(self.dbcon.aggregate(self.query)) + return len(self._buffer) > self._rec_loaded + + def fetchMore(self, index): + """ + Add more record to model. + + Called when 'canFetchMore' returns true, which means there are + more records in DB than loaded. + 'self._buffer' is used to stash cursor to limit requery + """ + log.debug("fetchMore") + # cursor.count() returns always total number, not only skipped + limit + remainder = len(self._buffer) - self._rec_loaded + items_to_fetch = min(self.PAGE_SIZE, remainder) + + self.beginInsertRows(index, + self._rec_loaded, + self._rec_loaded + items_to_fetch - 1) + self._add_page_records(self.local_site, self.remote_site, self._buffer) + + self.endInsertRows() + + def sort(self, index, order): + # limit unwanted first re-sorting by view + if index < 0: + return + + self._rec_loaded = 0 # change sort - reset from start + + if order == 0: + order = 1 + else: + order = -1 + + self.sort = {self.SORT_BY_COLUMN[index]: order} + self.query = self.get_default_query() + + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + def set_filter(self, filter): + self.filter = filter + self.refresh() + + def get_index(self, id): + """ + Get index of 'id' value. + + Used for keeping selection after refresh. + + Args: + id (str): MongoDB _id + Returns: + (QModelIndex) + """ + index = None + for i in range(self.rowCount(None)): + index = self.index(i, 0) + value = self.data(index, Qt.UserRole) + if value == id: + return index + return index + + def get_default_query(self, limit=0): + """ + Gets query that gets used when no extra sorting, filtering or + projecting is needed. + + Called for basic table view. 
+ + Returns: + [(dict)] - list with single dict - appropriate for aggregate + function for MongoDB + """ + if limit == 0: + limit = SyncRepresentationModel.PAGE_SIZE + + return [ + {"$match": self._get_match_part()}, + {"$unwind": "$files"}, + {'$addFields': { + 'order_remote': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.remote_site]} + }}, + 'order_local': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.local_site]} + }} + }}, + {'$addFields': { + # prepare progress per file, presence of 'created_dt' denotes + # successfully finished load/download + 'progress_remote': {'$first': { + '$cond': [{'$size': "$order_remote.progress"}, + "$order_remote.progress", + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}}, + 'progress_local': {'$first': { + '$cond': [{'$size': "$order_local.progress"}, + "$order_local.progress", + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}}, + # file might be successfully created or failed, not both + 'updated_dt_remote': {'$first': { + '$cond': [ + {'$size': "$order_remote.created_dt"}, + "$order_remote.created_dt", + { + '$cond': [ + {'$size': "$order_remote.last_failed_dt"}, + "$order_remote.last_failed_dt", + [] + ] + } + ] + }}, + 'updated_dt_local': {'$first': { + '$cond': [ + {'$size': "$order_local.created_dt"}, + "$order_local.created_dt", + { + '$cond': [ + {'$size': "$order_local.last_failed_dt"}, + "$order_local.last_failed_dt", + [] + ] + } + ] + }}, + 'failed_remote': { + '$cond': [{'$size': "$order_remote.last_failed_dt"}, + 1, + 0]}, + 'failed_local': { + '$cond': [{'$size': "$order_local.last_failed_dt"}, + 1, + 0]}, + 'failed_remote_error': {'$first': { + '$cond': [{'$size': "$order_remote.error"}, + "$order_remote.error", + [""]]}}, + 'failed_local_error': {'$first': { + '$cond': [{'$size': "$order_local.error"}, + "$order_local.error", + [""]]}}, + 'tries': {'$first': { + '$cond': [{'$size': "$order_local.tries"}, + "$order_local.tries", + {'$cond': [ + {'$size': "$order_remote.tries"}, + "$order_remote.tries", + [] + ]} + ]}} + }}, + {"$limit": limit}, + {"$skip": self._rec_loaded}, + {"$project": self.projection}, + {"$sort": self.sort} + ] + + def _get_match_part(self): + """ + Returns different content for 'match' portion if filtering by + name is present + + Returns: + (dict) + """ + if not self.filter: + return { + "type": "representation", + "_id": self._id + } + else: + regex_str = '.*{}.*'.format(self.filter) + return { + "type": "representation", + "_id": self._id, + '$or': [{'files.path': {'$regex': regex_str, '$options': 'i'}}] + } + + def get_default_projection(self): + """ + Projection part for aggregate query. + + All fields with '1' will be returned, no others. 
+ + Returns: + (dict) + """ + return { + "files": 1, + 'progress_remote': 1, + 'progress_local': 1, + 'updated_dt_remote': 1, + 'updated_dt_local': 1, + 'failed_remote_error': 1, + 'failed_local_error': 1, + 'tries': 1, + 'status': { + '$switch': { + 'branches': [ + { + 'case': { + '$or': [{'$eq': ['$progress_remote', 0]}, + {'$eq': ['$progress_local', 0]}]}, + 'then': 2 # Queued + }, + { + 'case': { + '$or': ['$failed_remote', '$failed_local']}, + 'then': 1 # Failed + }, + { + 'case': {'$or': [{'$and': [ + {'$gt': ['$progress_remote', 0]}, + {'$lt': ['$progress_remote', 1]} + ]}, + {'$and': [ + {'$gt': ['$progress_local', 0]}, + {'$lt': ['$progress_local', 1]} + ]} + ]}, + 'then': 0 # In Progress + }, + { + 'case': {'$eq': ['dummy_placeholder', 'paused']}, + 'then': 3 + }, + { + 'case': {'$and': [ + {'$eq': ['$progress_remote', 1]}, + {'$eq': ['$progress_local', 1]} + ]}, + 'then': 4 # Synced OK + }, + ], + 'default': -1 + } + } + } + + +class ImageDelegate(QtWidgets.QStyledItemDelegate): + """ + Prints icon of site and progress of synchronization + """ + + def __init__(self, parent=None): + super(ImageDelegate, self).__init__(parent) + self.icons = {} + + def paint(self, painter, option, index): + option = QtWidgets.QStyleOptionViewItem(option) + option.showDecorationSelected = True + + if (option.showDecorationSelected and + (option.state & QtWidgets.QStyle.State_Selected)): + painter.setOpacity(0.20) # highlight color is a bit off + painter.fillRect(option.rect, + option.palette.highlight()) + painter.setOpacity(1) + + d = index.data(QtCore.Qt.DisplayRole) + if d: + provider, value = d.split() + else: + return + + if not self.icons.get(provider): + resource_path = os.path.dirname(__file__) + resource_path = os.path.join(resource_path, "..", + "providers", "resources") + pix_url = "{}/{}.png".format(resource_path, provider) + pixmap = QtGui.QPixmap(pix_url) + self.icons[provider] = pixmap + else: + pixmap = self.icons[provider] + + point = QtCore.QPoint(option.rect.x() + + (option.rect.width() - pixmap.width()) / 2, + option.rect.y() + + (option.rect.height() - pixmap.height()) / 2) + painter.drawPixmap(point, pixmap) + + painter.setOpacity(0.5) + overlay_rect = option.rect + overlay_rect.setHeight(overlay_rect.height() * (1.0 - float(value))) + painter.fillRect(overlay_rect, + QtGui.QBrush(QtGui.QColor(0, 0, 0, 200))) + painter.setOpacity(1) + + +class SyncRepresentationErrorWindow(QtWidgets.QDialog): + def __init__(self, _id, project, dt, tries, msg, parent=None): + super(SyncRepresentationErrorWindow, self).__init__(parent) + self.setWindowFlags(QtCore.Qt.Window) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self.setStyleSheet(style.load_stylesheet()) + self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) + self.resize(250, 200) + + body = QtWidgets.QWidget() + footer = QtWidgets.QWidget() + footer.setFixedHeight(20) + + container = SyncRepresentationErrorWidget(_id, project, dt, tries, msg, + parent=self) + body_layout = QtWidgets.QHBoxLayout(body) + body_layout.addWidget(container) + body_layout.setContentsMargins(0, 0, 0, 0) + + message = QtWidgets.QLabel() + message.hide() + + footer_layout = QtWidgets.QVBoxLayout(footer) + footer_layout.addWidget(message) + footer_layout.setContentsMargins(0, 0, 0, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(body) + layout.addWidget(footer) + + self.setLayout(body_layout) + self.setWindowTitle("Sync Representation Error Detail") + + +class SyncRepresentationErrorWidget(QtWidgets.QWidget): + """ + Dialog to show when sync 
error happened, prints error message + """ + + def __init__(self, _id, project, dt, tries, msg, parent=None): + super(SyncRepresentationErrorWidget, self).__init__(parent) + + layout = QtWidgets.QFormLayout(self) + layout.addRow(QtWidgets.QLabel("Last update date"), + QtWidgets.QLabel(pretty_timestamp(dt))) + layout.addRow(QtWidgets.QLabel("Retries"), + QtWidgets.QLabel(str(tries))) + layout.addRow(QtWidgets.QLabel("Error message"), + QtWidgets.QLabel(msg)) + + +class SizeDelegate(QtWidgets.QStyledItemDelegate): + """ + Pretty print for file size + """ + + def __init__(self, parent=None): + super(SizeDelegate, self).__init__(parent) + + def displayText(self, value, locale): + if value is None: + # Ignore None value + return + + return self._pretty_size(value) + + def _pretty_size(self, value, suffix='B'): + for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: + if abs(value) < 1024.0: + return "%3.1f%s%s" % (value, unit, suffix) + value /= 1024.0 + return "%.1f%s%s" % (value, 'Yi', suffix) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index 2e5a9e5b94..5ba92435fd 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -908,7 +908,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): file_info = self.prepare_file_info(path, integrated_file_sizes[dest], - file_hash) + file_hash, + instance=instance) output_resources.append(file_info) return output_resources @@ -928,7 +929,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dest += '.{}'.format(self.TMP_FILE_EXT) return dest - def prepare_file_info(self, path, size=None, file_hash=None, sites=None): + def prepare_file_info(self, path, size=None, file_hash=None, + sites=None, instance=None): """ Prepare information for one file (asset or resource) Arguments: @@ -938,6 +940,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): sites(optional): array of published locations, [ {'name':'studio', 'created_dt':date} by default keys expected ['studio', 'site1', 'gdrive1'] + instance(dict, optional): to get collected settings Returns: rec: dictionary with filled info """ @@ -945,15 +948,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): remote_site = None sync_server_presets = None - # manager = ModulesManager() - # sync_server = manager.modules_by_name["sync_server"] - # try: - # if sync_server.enabled: - # local_site, remote_site = sync_server.get_sites_for_project() - # except ValueError: - # log.debug(("There are not set presets for SyncServer." - # " No credentials provided, no synching possible"). 
- # format(str(sync_server_presets))) + if (instance.context.data["system_settings"] + ["modules"] + ["sync_server"] + ["enabled"]): + sync_server_presets = (instance.context.data["project_settings"] + ["global"] + ["sync_server"]) + + if sync_server_presets["enabled"]: + local_site = sync_server_presets["config"].\ + get("active_site", "studio").strip() + remote_site = sync_server_presets["config"].get("remote_site") rec = { "_id": io.ObjectId(), diff --git a/pype/tools/settings/settings/style/style.css b/pype/tools/settings/settings/style/style.css index f3eb3a258e..3ce9837a8b 100644 --- a/pype/tools/settings/settings/style/style.css +++ b/pype/tools/settings/settings/style/style.css @@ -353,3 +353,39 @@ QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical { QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { background: none; } + +QTableView +{ + border: 1px solid #444; + gridline-color: #6c6c6c; + background-color: #201F1F; + alternate-background-color:#21252B; +} + +QHeaderView +{ + border: 1px transparent; + border-radius: 2px; + margin: 0px; + padding: 0px; +} + +QHeaderView::section { + background-color: #21252B; + /*color: silver;*/ + padding: 4px; + border: 1px solid #6c6c6c; + border-radius: 0px; + text-align: center; + color: #969b9e; + font-weight: bold; +} + +QTableView::item:pressed, QListView::item:pressed, QTreeView::item:pressed { + background: #78879b; + color: #FFFFFF; +} + +QTableView::item:selected:active, QTreeView::item:selected:active, QListView::item:selected:active { + background: #3d8ec9; +} \ No newline at end of file
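
Note on the transfer-progress pattern introduced above: both `upload_file` and `download_file` now drive the resumable request chunk by chunk and persist progress to the DB, but only once every `SyncServer.LOG_PROGRESS_SEC` seconds, so MongoDB write volume stays independent of `CHUNK_SIZE`. A minimal standalone sketch of that loop follows; `request`, `update_db` and `log` are stand-ins for the googleapiclient request object, `SyncServer.update_db` and the module logger, and are assumptions for illustration only, not the module's actual wiring.

```python
import time

LOG_PROGRESS_SEC = 5  # mirrors SyncServer.LOG_PROGRESS_SEC above


def run_chunked_transfer(request, update_db, log):
    """Drive a resumable transfer, persisting progress at most every
    LOG_PROGRESS_SEC seconds.

    'request' is assumed to follow the googleapiclient convention:
    next_chunk() returns (status, response), where status.progress()
    is a float in 0..1 and response stays None until the transfer
    finishes. 'update_db' stands in for SyncServer.update_db.
    """
    last_tick = status = response = None
    status_val = 0
    while response is None:
        if status:
            status_val = float(status.progress())
        # throttle: log and write to DB at most once per LOG_PROGRESS_SEC
        if not last_tick or time.time() - last_tick >= LOG_PROGRESS_SEC:
            last_tick = time.time()
            log.debug("Transferred %d%%." % int(status_val * 100))
            update_db(progress=status_val)
        status, response = request.next_chunk()
    return response
```

The GUI then reads the persisted per-site `progress` values back through the aggregation pipelines in `tray/app.py`, averaging them per representation into `avg_progress_local` / `avg_progress_remote` for the summary view.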