Merge pull request #796 from pypeclub/3.0/fix_sync_server_settings

3.0.fix sync server settings
This commit is contained in:
Milan Kolar 2020-12-16 21:32:50 +01:00 committed by GitHub
commit de2a7be94f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 442 additions and 185 deletions

View file

@ -34,6 +34,7 @@ from .logging import LoggingModule
from .muster import MusterModule
from .standalonepublish import StandAlonePublishModule
from .websocket_server import WebsocketModule
from .sync_server import SyncServer
__all__ = (
@ -69,5 +70,6 @@ __all__ = (
"MusterModule",
"StandAlonePublishModule",
"WebsocketModule"
"WebsocketModule",
"SyncServer"
)

View file

@ -17,61 +17,74 @@ Quick HOWTOs:
I want to start syncing my newly published files:
------------------------------------------------
Get credentials for service account, share target folder on Gdrive with it
Set path to stored credentils file in gdrive.json
Set name of site, root folder in gdrive.json
Update config.json/remote_site to name of site you set in previous step
Start Pype and publish
- Check that Sync server is enabled globally in
`pype/settings/defaults/system_settings/modules.json`
- Get credentials for service account, share target folder on Gdrive with it
- Set path to stored credentials file in
`pype/settings/defaults/project_settings/global.json`.`credentials_url`
- Set name of site, root folder and provider('gdrive' in case of Google Drive) in
`pype/settings/defaults/project_settings/global.json`.`sites`
- Update `pype/settings/defaults/project_settings/global.json`.`remote_site`
to name of site you set in previous step.
- Check that project setting is enabled (in this `global.json` file)
- Start Pype and publish
My published file is not syncing:
--------------------------------
Check that representation record contains for all 'files.site' skeleton in
format: {name: "MY_CONFIGURED_REMOTE_SITE"}
Check if that record doesn't have already 'created_dt' filled. That would
- Check that the representation record contains the 'files.sites' skeleton
for all files, in format: `{name: "MY_CONFIGURED_REMOTE_SITE"}`
- Check if that record doesn't already have 'created_dt' filled. That would
denote that the file was synced but someone might have removed it on the
remote site.
If that records contains field "error", check that "tries" field doesn't
- If that record contains the field "error", check that the "tries" field
doesn't contain the same value as the 'retry_cnt' threshold. If it does, fix
the problem mentioned in the 'error' field and delete the 'tries' field.
I want to sync my already published files:
-----------------------------------------
Configure your Pype for syncing (see first section of Howtos).
Manually add skeleton {name: "MY_CONFIGURED_REMOTE_SITE"} to all
- Configure your Pype for syncing (see first section of Howtos).
- Manually add skeleton {name: "MY_CONFIGURED_REMOTE_SITE"} to all
representation.files.sites:
db.getCollection('MY_PROJECT').update({type:"representation"},
{$set:{"files.$[].sites.MY_CONFIGURED_REMOTE_SITE" : {}}}, true, true)
`db.getCollection('MY_PROJECT').update({type:"representation"},
{$set:{"files.$[].sites.MY_CONFIGURED_REMOTE_SITE" : {}}}, true, true)`
Needed configuration:
--------------------
pype-config/presets/config.json:
"local_id": "local_0", -- identifier of user pype
"retry_cnt": 3, -- how many times try to synch file in case of error
"loop_delay": 60, -- how many seconds between sync loops
"active_site": "studio", -- which site user current, 'studio' by default,
`pype/settings/defaults/project_settings/global.json`.`sync_server`:
- `"local_id": "local_0",` -- identifier of user pype
- `"retry_cnt": 3,` -- how many times to try to sync a file in case of error
- `"loop_delay": 60,` -- how many seconds between sync loops
- `"active_site": "studio",` -- which site the user currently uses, 'studio' by default,
could be the same as 'local_id' if the user is working
from home without connection to studio
infrastructure
"remote_site": "gdrive" -- key for site to synchronize to. Must match to site
configured in 'gdrive.json'.
- `"remote_site": "gdrive"` -- key for site to synchronize to. Must match to site
configured lower in this file.
Used in IntegrateNew to prepare skeleton for
syncing in the representation record.
Leave empty if no syncing is wanted.
This is a general configuration, 'local_id', 'active_site' and 'remote_site'
will be set and changed by some GUI in the future.
pype-config/presets/gdrive.json:
"gdrive": { - site name, must be unique
"credentials_url": "/my_secret_folder/credentials.json",
`pype/settings/defaults/project_settings/global.json`.`sync_server`.`sites`:
```- "gdrive": { - site name, must be unique
- "provider": "gdrive" -- type of provider, must be registered in 'sync_server\providers\lib.py'
- "credentials_url": "/my_secret_folder/credentials.json",
-- path to credentials for service account
"root": { -- "root": "/My Drive" in simple scenario, config here for
- "root": { -- "root": "/My Drive" in simple scenario, config here for
-- multiroot projects
"root_one": "/My Drive/work_folder",
"root_tow": "/My Drive/publish_folder"
}
}
- "root_one": "/My Drive/work_folder",
- "root_two": "/My Drive/publish_folder"
}
}```

View file

@ -1,4 +1,4 @@
from .sync_server import SyncServer
from pype.modules.sync_server.sync_server import SyncServer
def tray_init(tray_widget, main_widget):

View file

@ -6,7 +6,7 @@ from googleapiclient import errors
from .abstract_provider import AbstractProvider
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
from pype.api import Logger
from pype.api import config
from pype.api import get_system_settings
from ..utils import time_function
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly',
@ -25,10 +25,14 @@ class GDriveHandler(AbstractProvider):
slow and should be run only when necessary. Currently is set to
lazy creation, created only after first call when necessary.
Configuration for provider is in pype-config/presets/gdrive.json
Configuration for provider is in
'settings/defaults/project_settings/global.json'
Settings could be overwritten per project.
Example of config:
"gdrive": { - site name
"provider": "gdrive", - type of provider, label must be registered
"credentials_url": "/my_secret_folder/credentials.json",
"root": { - could be "root": "/My Drive" for single root
"root_one": "/My Drive",
@ -39,12 +43,12 @@ class GDriveHandler(AbstractProvider):
FOLDER_STR = 'application/vnd.google-apps.folder'
MY_DRIVE_STR = 'My Drive' # name of root folder of regular Google drive
def __init__(self, site_name, tree=None):
def __init__(self, site_name, tree=None, presets=None):
self.presets = None
self.active = False
self.site_name = site_name
self.presets = self.get_presets().get(site_name, None)
self.presets = presets
if not self.presets:
log.info("Sync Server: There are no presets for {}.".
format(site_name))
@ -597,7 +601,12 @@ class GDriveHandler(AbstractProvider):
"""
provider_presets = None
try:
provider_presets = config.get_presets()["sync_server"]["gdrive"]
provider_presets = (
get_system_settings()["modules"]
["sync_server"]
["providers"]
["gdrive"]
)
except KeyError:
log.info(("Sync Server: There are no presets for Gdrive " +
"provider.").

View file

@ -29,7 +29,7 @@ class ProviderFactory:
"""
self.providers[provider] = (creator, batch_limit)
def get_provider(self, provider, site_name, tree=None):
def get_provider(self, provider, site_name, tree=None, presets=None):
"""
Returns new instance of provider client for specific site.
One provider could have multiple sites.
@ -42,11 +42,13 @@ class ProviderFactory:
site_name (string): descriptor of site, different service accounts
must have different site name
tree (dictionary): - folder paths to folder id structure
presets (dictionary): config for provider and site (eg.
"credentials_url"..)
Returns:
(implementation of AbstractProvider)
"""
creator_info = self._get_creator_info(provider)
site = creator_info[0](site_name, tree) # call init
site = creator_info[0](site_name, tree, presets) # call init
return site
@ -68,10 +70,16 @@ class ProviderFactory:
def _get_creator_info(self, provider):
"""
Collect all necessary info for provider. Currently only creator
class and batch limit
class and batch limit.
Args:
provider (string): 'gdrive' etc
Returns:
(tuple): (creator, batch_limit)
creator is class of a provider (ex: GDriveHandler)
batch_limit denotes how many files synced at single loop
it's provided via 'register_provider' as it's needed even
before the provider class itself is initialized
(setting it as a class variable didn't work)
"""
creator_info = self.providers.get(provider)
if not creator_info:
@ -81,4 +89,8 @@ class ProviderFactory:
factory = ProviderFactory()
# this says that there is implemented provider with a label 'gdrive'
# there is implementing 'GDriveHandler' class
# 7 denotes number of files that could be synced in single loop - learned by
# trial and error
factory.register_provider('gdrive', GDriveHandler, 7)

View file

@ -1,7 +1,8 @@
from pype.api import config, Logger
from pype.api import (
get_project_settings,
get_current_project_settings)
import threading
import asyncio
import concurrent.futures
from concurrent.futures._base import CancelledError
@ -10,12 +11,21 @@ from datetime import datetime
from .providers import lib
import os
from avalon import io
from bson.objectid import ObjectId
from avalon.api import AvalonMongoDB
from .utils import time_function
log = Logger().get_logger("SyncServer")
import six
from pype.lib import PypeLogger
from .. import PypeModule, ITrayService
if six.PY2:
web = asyncio = STATIC_DIR = WebSocketAsync = None
else:
import asyncio
log = PypeLogger().get_logger("SyncServer")
class SyncStatus(Enum):
@ -24,7 +34,7 @@ class SyncStatus(Enum):
DO_DOWNLOAD = 2
class SyncServer():
class SyncServer(PypeModule, ITrayService):
"""
Synchronization server that is syncing published files from local to
any of implemented providers (like GDrive, S3 etc.)
@ -81,90 +91,207 @@ class SyncServer():
# different limits imposed by its API
# set 0 to no limit
REPRESENTATION_LIMIT = 100
DEFAULT_SITE = 'studio'
name = "sync_server"
label = "Sync Server"
def initialize(self, module_settings):
"""
Called during Module Manager creation.
Collects needed data, checks asyncio presence.
Sets 'enabled' according to global settings for the module.
Shouldn't be doing any initialization; that's a job for 'tray_init'.
"""
sync_server_settings = module_settings[self.name]
self.enabled = sync_server_settings["enabled"]
if asyncio is None:
raise AssertionError(
"SyncServer module requires Python 3.5 or higher."
)
# some parts of code need to run sequentially, not in async
self.lock = None
self.connection = None # connection to avalon DB to update state
self.presets = None # settings for all enabled projects for sync
self.sync_server_thread = None # asyncio requires new thread
def connect_with_modules(self, *_a, **kw):
return
def tray_init(self):
"""
Actual initialization of Sync Server.
Called when tray is initialized, it checks if module should be
enabled. If not, no initialization necessary.
"""
if not self.enabled:
return
def __init__(self):
self.qaction = None
self.failed_icon = None
self._is_running = False
self.presets = None
self.lock = threading.Lock()
self.connection = AvalonMongoDB()
try:
self.presets = config.get_presets()["sync_server"]["config"]
self.presets = self.get_synced_presets()
self.set_active_sites(self.presets)
self.sync_server_thread = SyncServerThread(self)
self.active_site = self.presets["active_site"]
self.remote_site = self.presets["remote_site"]
# try to activate providers, need to have valid credentials
self.active_sites = []
for provider in lib.factory.providers.keys():
for site in lib.factory.providers[provider][0].get_presets(). \
keys():
handler = lib.factory.get_provider(provider, site)
if handler.is_active():
self.active_sites.append((provider, site))
except ValueError:
log.info("No system setting for sync. Not syncing.")
except KeyError:
log.debug(("There are not set presets for SyncServer."
" No credentials provided, no syncing possible").
format(str(self.presets)))
log.info((
"There are not set presets for SyncServer OR "
"Credentials provided are invalid, "
"no syncing possible").
format(str(self.presets)), exc_info=True)
@property
def active_site(self):
def tray_start(self):
"""
Returns active 'local' site (could be personal location on user
laptop or general 'studio' mounted disk.
Its 'mine' part of synchronization.
Triggered when Tray is started.
Checks if configuration presets are available and if there is
any provider ('gdrive', 'S3') that is activated
(eg. has valid credentials).
Returns:
(string)
None
"""
return self._active_site
if self.presets and self.active_sites:
self.sync_server_thread.start()
else:
log.info("No presets or active providers. " +
"Synchronization not possible.")
@active_site.setter
def active_site(self, value):
def tray_exit(self):
"""
Sets 'mine' part of synchronization process. It is expected only
single site is active at the time. Active site could be changed
though on different location (user working in studio has
'active_site' = 'studio', when user is at home changes
'active_site' to 'john_doe_local_001'.
Stops sync thread if running.
Called from Module Manager
"""
if not self.sync_server_thread:
return
if not self.is_running:
return
try:
log.info("Stopping sync server server")
self.sync_server_thread.is_running = False
self.sync_server_thread.stop()
except Exception:
log.warning(
"Error has happened during Killing sync server",
exc_info=True
)
@property
def is_running(self):
return self.sync_server_thread.is_running
def get_sites_for_project(self, project_name=None):
"""
Checks if sync is enabled globally and on project.
In that case return local and remote site
Args:
project_name (str):
Returns:
(tuple): of strings, labels for (local_site, remote_site)
"""
if self.enabled:
if project_name:
settings = get_project_settings(project_name)
else:
settings = get_current_project_settings()
sync_server_presets = settings["global"]["sync_server"]["config"]
if settings["global"]["sync_server"]["enabled"]:
local_site = sync_server_presets.get("active_site",
"studio").strip()
remote_site = sync_server_presets.get("remote_site")
return local_site, remote_site
return self.DEFAULT_SITE, None
def get_synced_presets(self):
"""
Collects all projects which have enabled syncing and their settings
Returns:
(dict): of settings, keys are project names
"""
sync_presets = {}
for collection in self.connection.database.collection_names(False):
sync_settings = self.get_synced_preset(collection)
if sync_settings:
sync_presets[collection] = sync_settings
if not sync_presets:
log.info("No enabled and configured projects for sync.")
return sync_presets
def get_synced_preset(self, project_name):
""" Handles pulling sync_server's settings for enabled 'project_name'
Args:
project_name (str): used in project settings
Returns:
(dict): settings dictionary for the enabled project,
empty if no settings or sync is disabled
"""
settings = get_project_settings(project_name)
sync_settings = settings.get("global")["sync_server"]
if not sync_settings:
log.info("No project setting for Sync Server, not syncing.")
return {}
if sync_settings.get("enabled"):
return sync_settings
return {}
def set_active_sites(self, settings):
"""
Sets 'self.active_sites' as a dictionary from provided 'settings'
Format:
{ 'project_name' : ('provider_name', 'site_name') }
Args:
value (string): label for site, needs to match representation's
'files.site'.keys()
Returns:
(string)
settings (dict): all enabled projects' sync settings (site labels,
retry counts, etc.)
"""
self._active_site = value
self.active_sites = {}
for project_name, project_setting in settings.items():
for site_name, config in project_setting.get("sites").items():
handler = lib.factory.get_provider(config["provider"],
site_name,
presets=config)
if handler.is_active():
if not self.active_sites.get('project_name'):
self.active_sites[project_name] = []
@property
def remote_site(self):
"""
Remote side of synchronization, where "to synchronize to".
Currently expected only single remote destination ('gdrive'..),
but prepared for multiple.
Denotes 'theirs' side of synchronization.
self.active_sites[project_name].append(
(config["provider"], site_name))
Returns:
(list) of strings (['gdrive'])
"""
return [self._remote_site]
if not self.active_sites:
log.info("No sync sites active, no working credentials provided")
@remote_site.setter
def remote_site(self, value):
self._remote_site = value
def get_active_sites(self, project_name):
"""
Returns active sites (provider configured and able to connect) per
project.
def get_collections(self):
Args:
project_name (str): used as a key in dict
Returns:
(dict):
Format:
{ 'project_name' : ('provider_name', 'site_name') }
"""
Returns:
(list) of strings with collection names in avalon DB
"""
return self.connection.database.collection_names(False)
return self.active_sites[project_name]
@time_function
def get_sync_representations(self, collection, active_site, remote_site):
@ -191,8 +318,7 @@ class SyncServer():
log.debug("Check representations for : {}".format(collection))
self.connection.Session["AVALON_PROJECT"] = collection
# retry_cnt - number of attempts to sync specific file before giving up
retries_arr = self._get_retries_arr()
active_providers_str = ",".join(remote_site)
retries_arr = self._get_retries_arr(collection)
query = {
"type": "representation",
"$or": [
@ -206,7 +332,7 @@ class SyncServer():
}}, {
"files.sites": {
"$elemMatch": {
"name": {"$in": [active_providers_str]},
"name": {"$in": [remote_site]},
"created_dt": {"$exists": False},
"tries": {"$in": retries_arr}
}
@ -223,7 +349,7 @@ class SyncServer():
}}, {
"files.sites": {
"$elemMatch": {
"name": {"$in": [active_providers_str]},
"name": {"$in": [remote_site]},
"created_dt": {"$exists": True}
}
}
@ -237,18 +363,19 @@ class SyncServer():
return representations
def check_status(self, file, provider_name):
def check_status(self, file, provider_name, config_preset):
"""
Check synchronization status for single 'file' of single
'representation' by single 'provider'.
(Eg. check if 'scene.ma' of lookdev.v10 should be synced to GDrive
Always is comparing local record, eg. site with
'name' == self.presets["active_site"]
'name' == self.presets[PROJECT_NAME]['config']["active_site"]
Args:
file (dictionary): of file from representation in Mongo
provider_name (string): - gdrive etc.
config_preset (dict): config about active site, retries
Returns:
(string) - one of SyncStatus
"""
@ -262,25 +389,25 @@ class SyncServer():
tries = self._get_tries_count_from_rec(provider_rec)
# file will be skipped if unsuccessfully tried over threshold
# error metadata needs to be purged manually in DB to reset
if tries < self.presets["retry_cnt"]:
if tries < int(config_preset["retry_cnt"]):
return SyncStatus.DO_UPLOAD
else:
_, local_rec = self._get_provider_rec(
sites,
self.presets["active_site"]) or {}
config_preset["active_site"]) or {}
if not local_rec or not local_rec.get("created_dt"):
tries = self._get_tries_count_from_rec(local_rec)
# file will be skipped if unsuccessfully tried over
# threshold times, error metadata needs to be purged
# manually in DB to reset
if tries < self.presets["retry_cnt"]:
if tries < int(config_preset["retry_cnt"]):
return SyncStatus.DO_DOWNLOAD
return SyncStatus.DO_NOTHING
async def upload(self, file, representation, provider_name, site_name,
tree=None):
tree=None, preset=None):
"""
Upload single 'file' of a 'representation' to 'provider'.
Source url is taken from 'file' portion, where {root} placeholder
@ -297,6 +424,7 @@ class SyncServer():
site_name (string): site on provider, single provider(gdrive) could
have multiple sites (different accounts, credentials)
tree (dictionary): injected memory structure for performance
preset (dictionary): site config ('credentials_url', 'root'...)
"""
# create ids sequentially, upload file in parallel later
@ -304,7 +432,8 @@ class SyncServer():
# this part modifies structure on 'remote_site', only single
# thread can do that at a time, upload/download to prepared
# structure should be run in parallel
handler = lib.factory.get_provider(provider_name, site_name, tree)
handler = lib.factory.get_provider(provider_name, site_name,
tree=tree, presets=preset)
remote_file = self._get_remote_file_path(file,
handler.get_roots_config()
)
@ -328,7 +457,7 @@ class SyncServer():
return file_id
async def download(self, file, representation, provider_name,
site_name, tree=None):
site_name, tree=None, preset=None):
"""
Downloads file to local folder denoted in representation.Context.
@ -339,12 +468,14 @@ class SyncServer():
site_name (string): site on provider, single provider(gdrive) could
have multiple sites (different accounts, credentials)
tree (dictionary): injected memory structure for performance
preset (dictionary): site config ('credentials_url', 'root'...)
Returns:
(string) - 'name' of local file
"""
with self.lock:
handler = lib.factory.get_provider(provider_name, site_name, tree)
handler = lib.factory.get_provider(provider_name, site_name,
tree=tree, presets=preset)
remote_file = self._get_remote_file_path(file,
handler.get_roots_config()
)
@ -419,43 +550,6 @@ class SyncServer():
source_file=source_file,
error_str=error_str))
def tray_start(self):
"""
Triggered when Tray is started. Checks if configuration presets
are available and if there is any provider ('gdrive', 'S3') that
is activated (eg. has valid credentials).
Returns:
None
"""
if self.presets and self.active_sites:
self.sync_server_thread.start()
else:
log.debug("No presets or active providers. " +
"Synchronization not possible.")
def tray_exit(self):
self.stop()
def thread_stopped(self):
self._is_running = False
@property
def is_running(self):
return self.sync_server_thread.is_running
def stop(self):
if not self.is_running:
return
try:
log.debug("Stopping sync server server")
self.sync_server_thread.is_running = False
self.sync_server_thread.stop()
except Exception:
log.warning(
"Error has happened during Killing sync server",
exc_info=True
)
def _get_file_info(self, files, _id):
"""
Return record from list of records which name matches to 'provider'
@ -511,7 +605,7 @@ class SyncServer():
"""
# TODO - implement reset for ALL files or ALL sites
query = {
"_id": io.ObjectId(representation_id)
"_id": ObjectId(representation_id)
}
self.connection.Session["AVALON_PROJECT"] = collection
representation = list(self.connection.find(query))
@ -538,14 +632,14 @@ class SyncServer():
update
)
def get_loop_delay(self):
def get_loop_delay(self, project_name):
"""
Return count of seconds before next synchronization loop starts
after finish of previous loop.
Returns:
(int): in seconds
"""
return self.presets["loop_delay"]
return int(self.presets[project_name]["config"]["loop_delay"])
def _get_success_dict(self, file_index, site_index, new_file_id):
"""
@ -642,7 +736,7 @@ class SyncServer():
path = path.format(**root_config)
return path
def _get_retries_arr(self):
def _get_retries_arr(self, project_name):
"""
Returns array with allowed values in 'tries' field. If repre
contains these values, it means it was tried to be synchronized
@ -651,7 +745,8 @@ class SyncServer():
Returns:
(list)
"""
arr = [i for i in range(self.presets["retry_cnt"])]
retry_cnt = self.presets[project_name].get("config")["retry_cnt"]
arr = [i for i in range(int(retry_cnt))]
arr.append(None)
return arr
@ -706,15 +801,16 @@ class SyncServerThread(threading.Thread):
while self.is_running:
import time
start_time = None
for collection in self.module.get_collections():
for collection, preset in self.module.get_synced_presets().\
items():
start_time = time.time()
sync_repres = self.module.get_sync_representations(
collection,
self.module.active_site,
self.module.remote_site
preset.get('config')["active_site"],
preset.get('config')["remote_site"]
)
local = self.module.active_site
local = preset.get('config')["active_site"]
task_files_to_process = []
files_processed_info = []
# process only unique file paths in one batch
@ -723,9 +819,12 @@ class SyncServerThread(threading.Thread):
# upload process can find already uploaded file and
# reuse same id
processed_file_path = set()
for active_site in self.module.active_sites:
provider, site = active_site
handler = lib.factory.get_provider(provider, site)
for check_site in self.module.get_active_sites(collection):
provider, site = check_site
site_preset = preset.get('sites')[site]
handler = lib.factory.get_provider(provider,
site,
presets=site_preset)
limit = lib.factory.get_provider_batch_limit(provider)
# first call to get_provider could be expensive, its
# building folder tree structure in memory
@ -741,8 +840,10 @@ class SyncServerThread(threading.Thread):
if file_path in processed_file_path:
continue
status = self.module.check_status(file,
provider)
status = self.module.check_status(
file,
provider,
preset.get('config'))
if status == SyncStatus.DO_UPLOAD:
tree = handler.get_tree()
limit -= 1
@ -751,7 +852,8 @@ class SyncServerThread(threading.Thread):
sync,
provider,
site,
tree))
tree,
site_preset))
task_files_to_process.append(task)
# store info for exception handling
files_processed_info.append((file,
@ -762,11 +864,12 @@ class SyncServerThread(threading.Thread):
tree = handler.get_tree()
limit -= 1
task = asyncio.create_task(
self.module.download(file,
sync,
provider,
site,
tree))
self.module.download(file,
sync,
provider,
site,
tree,
site_preset))
task_files_to_process.append(task)
files_processed_info.append((file,
@ -794,7 +897,7 @@ class SyncServerThread(threading.Thread):
duration = time.time() - start_time
log.debug("One loop took {:.2f}s".format(duration))
await asyncio.sleep(self.module.get_loop_delay())
await asyncio.sleep(self.module.get_loop_delay(collection))
except ConnectionResetError:
log.warning("ConnectionResetError in sync loop, trying next loop",
exc_info=True)

View file

@ -15,7 +15,7 @@ from avalon import io
from avalon.vendor import filelink
import pype.api
from datetime import datetime
from pype.api import config
from pype.modules import ModulesManager
# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
@ -929,16 +929,20 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Returns:
rec: dictionary with filled info
"""
local_site = 'studio' # default
remote_site = None
sync_server_presets = None
manager = ModulesManager()
sync_server = manager.modules_by_name["sync_server"]
try:
sync_server_presets = config.get_presets()["sync_server"]["config"]
except KeyError:
if sync_server.enabled:
local_site, remote_site = sync_server.get_sites_for_project()
except ValueError:
log.debug(("There are not set presets for SyncServer."
" No credentials provided, no synching possible").
format(str(sync_server_presets)))
local_site = sync_server_presets.get("active_site", "studio").strip()
remote_site = sync_server_presets.get("remote_site")
rec = {
"_id": io.ObjectId(),
"path": path

View file

@ -178,5 +178,22 @@
"editorial[ftrack.Folder]": {}
}
}
},
"sync_server": {
"enabled": false,
"config": {
"local_id": "local_0",
"retry_cnt": "3",
"loop_delay": "60",
"active_site": "studio",
"remote_site": "gdrive"
},
"sites": {
"gdrive": {
"provider": "gdrive",
"credentials_url": "",
"root": "/sync_testing/test"
}
}
}
}

View file

@ -20,7 +20,8 @@
"PYPE_PROJECT_CONFIGS",
"PYPE_PYTHON_EXE",
"PYPE_OCIO_CONFIG",
"PYBLISH_GUI"
"PYBLISH_GUI",
"QT_AUTO_SCREEN_SCALE_FACTOR"
]
},
"FFMPEG_PATH": {
@ -44,6 +45,7 @@
"darwin": "{VIRTUAL_ENV}/bin/python"
},
"PYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs",
"PYBLISH_GUI": "pyblish_pype"
"PYBLISH_GUI": "pyblish_pype",
"QT_AUTO_SCREEN_SCALE_FACTOR": "1"
}
}

View file

@ -117,6 +117,9 @@
"enabled": false,
"workspace_name": "studio name"
},
"sync_server": {
"enabled": false
},
"deadline": {
"enabled": true,
"DEADLINE_REST_URL": "http://localhost:8082"
@ -150,4 +153,4 @@
"idle_manager": {
"enabled": true
}
}
}

View file

@ -23,6 +23,11 @@
"key": "project_folder_structure",
"label": ""
}]
},
{
"type": "schema",
"name": "schema_project_syncserver"
}
]
}

View file

@ -0,0 +1,76 @@
{
"type": "dict",
"key": "sync_server",
"label": "Sync Server (currently unused)",
"collapsable": true,
"checkbox_key": "enabled",
"is_file": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "dict",
"key": "config",
"label": "Config",
"collapsable": true,
"children": [
{
"type": "text",
"key": "local_id",
"label": "Local ID"
},
{
"type": "text",
"key": "retry_cnt",
"label": "Retry Count"
},
{
"type": "text",
"key": "loop_delay",
"label": "Loop Delay"
},
{
"type": "text",
"key": "active_site",
"label": "Active Site"
},
{
"type": "text",
"key": "remote_site",
"label": "Remote Site"
}
]
}, {
"type": "dict-modifiable",
"collapsable": true,
"key": "sites",
"label": "Sites",
"collapsable_key": false,
"is_file": true,
"object_type":
{
"type": "dict",
"children": [
{
"type": "text",
"key": "provider",
"label": "Provider"
},
{
"type": "text",
"key": "credentials_url",
"label": "Credentials url"
},
{
"type": "text",
"key": "root",
"label": "Root"
}]
}
}
]
}

View file

@ -110,7 +110,18 @@
"label": "Workspace name"
}
]
}, {
}, {
"type": "dict",
"key": "sync_server",
"label": "Sync Server",
"collapsable": true,
"checkbox_key": "enabled",
"children": [{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
}]
},{
"type": "dict",
"key": "deadline",
"label": "Deadline",
@ -217,4 +228,4 @@
]
}
]
}
}