Merge branch 'main' into develop

Commit eb09680dbd by Milan Kolar, 2021-02-19 13:58:09 +01:00
16 changed files with 1619 additions and 361 deletions


@@ -62,7 +62,7 @@ Needed configuration:
- `"local_id": "local_0",` -- identifier of user pype
- `"retry_cnt": 3,` -- how many times try to synch file in case of error
- `"loop_delay": 60,` -- how many seconds between sync loops
- `"active_site": "studio",` -- which site user current, 'studio' by default,
- `"publish_site": "studio",` -- which site user current, 'studio' by default,
could by same as 'local_id' if user is working
from home without connection to studio
infrastructure
@@ -71,7 +71,7 @@ Needed configuration:
Used in IntegrateNew to prepare a skeleton for
syncing in the representation record.
Leave empty if no syncing is wanted.
- This is a general configuration; 'local_id', 'active_site' and 'remote_site'
+ This is a general configuration; 'local_id', 'publish_site' and 'remote_site'
will be set and changed by some GUI in the future.
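Taken together, the keys above might be sketched as the following Python dict mirroring the JSON defaults (a minimal sketch; the concrete values are illustrative assumptions, not shipped defaults):

# Hypothetical sketch of the sync_server "config" section described above;
# only the key names come from the documentation.
sync_server_config = {
    "local_id": "local_0",     # identifier of the user's pype
    "retry_cnt": 3,            # retries per file after an error
    "loop_delay": 60,          # seconds between sync loops
    "publish_site": "studio",  # renamed from "active_site" in this commit
    "remote_site": "gdrive",   # leave empty if no syncing is wanted
}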
`pype/settings/defaults/project_settings/global.json`.`sync_server`.`sites`:


@@ -3,6 +3,13 @@ from abc import ABCMeta, abstractmethod
class AbstractProvider(metaclass=ABCMeta):
+    def __init__(self, site_name, tree=None, presets=None):
+        self.presets = None
+        self.active = False
+        self.site_name = site_name
+        self.presets = presets
@abstractmethod
def is_active(self):
"""
@@ -27,13 +34,14 @@ class AbstractProvider(metaclass=ABCMeta):
pass
@abstractmethod
-    def download_file(self, source_path, local_path):
+    def download_file(self, source_path, local_path, overwrite=True):
"""
Download file from provider into local system
Args:
source_path (string): absolute path on provider
local_path (string): absolute path on local
+            overwrite (bool): overwrite an existing file if present (default True)
Returns:
None
"""


@@ -351,6 +351,10 @@ class GDriveHandler(AbstractProvider):
last_tick = status = response = None
status_val = 0
while response is None:
+                if server.is_representation_paused(representation['_id'],
+                                                   check_parents=True,
+                                                   project_name=collection):
+                    raise ValueError("Paused during process, please redo.")
if status:
status_val = float(status.progress())
if not last_tick or \
@@ -433,6 +437,10 @@ class GDriveHandler(AbstractProvider):
last_tick = status = response = None
status_val = 0
while response is None:
+                if server.is_representation_paused(representation['_id'],
+                                                   check_parents=True,
+                                                   project_name=collection):
+                    raise ValueError("Paused during process, please redo.")
if status:
status_val = float(status.progress())
if not last_tick or \
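The same check lands in both the upload and the download progress loops. Stripped of the Google API plumbing, the pattern is a cooperative pause test on every polling iteration. A schematic sketch (the server method and its arguments come from the diff; the chunked-transfer call is the usual google-api-python-client idiom and stands in for the surrounding code):

def transfer_with_pause_check(request, server, representation, collection):
    # Schematic of the pause check added to both transfer loops.
    response = status = None
    while response is None:
        # Abort promptly if the user paused this representation (or one
        # of its parents) while the chunked transfer is still in flight.
        if server.is_representation_paused(representation['_id'],
                                           check_parents=True,
                                           project_name=collection):
            raise ValueError("Paused during process, please redo.")
        status, response = request.next_chunk()
    return response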


@@ -1,10 +1,6 @@
-from enum import Enum
from .gdrive import GDriveHandler
-class Providers(Enum):
-    LOCAL = 'studio'
-    GDRIVE = 'gdrive'
+from .local_drive import LocalDriveHandler
class ProviderFactory:
@@ -94,3 +90,4 @@ factory = ProviderFactory()
# 7 denotes the number of files that can be synced in a single loop - learned
# by trial and error
factory.register_provider('gdrive', GDriveHandler, 7)
+factory.register_provider('local_drive', LocalDriveHandler, 10)
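For illustration, registering a further provider follows the same shape (the handler and site code here are hypothetical; only register_provider and its three arguments appear in this diff):

# Hypothetical registration; the third argument is the per-loop batch
# size, as with 'gdrive' (7) and 'local_drive' (10) above.
class FtpHandler(AbstractProvider):  # not part of this commit
    ...

factory.register_provider('ftp', FtpHandler, 5)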


@@ -0,0 +1,59 @@
from __future__ import print_function
import os.path
import shutil
from pype.api import Logger
from .abstract_provider import AbstractProvider
log = Logger().get_logger("SyncServer")
class LocalDriveHandler(AbstractProvider):
""" Handles required operations on mounted disks with OS """
def is_active(self):
return True
    def upload_file(self, source_path, target_path, overwrite=True):
        """
            Copies file from 'source_path' to 'target_path'
        """
        if os.path.exists(source_path):
            if overwrite:
                shutil.copy(source_path, target_path)
            else:
                if os.path.exists(target_path):
                    raise ValueError("File {} exists, set overwrite".
                                     format(target_path))
                # target absent, safe to copy even without overwrite
                shutil.copy(source_path, target_path)
    def download_file(self, source_path, local_path, overwrite=True):
        """
            Downloads a file from 'source_path' to 'local_path'
        """
        if os.path.exists(source_path):
            if overwrite:
                shutil.copy(source_path, local_path)
            else:
                if os.path.exists(local_path):
                    raise ValueError("File {} exists, set overwrite".
                                     format(local_path))
                # local copy absent, safe to copy even without overwrite
                shutil.copy(source_path, local_path)
def delete_file(self, path):
"""
Deletes a file at 'path'
"""
if os.path.exists(path):
os.remove(path)
def list_folder(self, folder_path):
"""
            Returns a list of files and subfolders in 'folder_path'; walks recursively.
"""
lst = []
if os.path.isdir(folder_path):
for (dir_path, dir_names, file_names) in os.walk(folder_path):
for name in file_names:
lst.append(os.path.join(dir_path, name))
for name in dir_names:
lst.append(os.path.join(dir_path, name))
return lst
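For illustration, the handler can be exercised directly; the site name 'studio' matches the default settings further below, while the paths are invented for the example:

import os
import tempfile

# Hypothetical round trip through LocalDriveHandler.
handler = LocalDriveHandler('studio')
src = os.path.join(tempfile.gettempdir(), 'sync_demo.txt')
with open(src, 'w') as f:
    f.write('demo')
dst = src + '.copy'

handler.upload_file(src, dst)   # plain shutil.copy under the hood
print(handler.list_folder(os.path.dirname(src))[:5])
handler.delete_file(dst)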

[Four binary image files changed; previews not shown. Three added (766 B, 692 B, 561 B) and one modified (557 B -> 1.2 KiB).]

[Two file diffs suppressed because they are too large.]


@@ -298,6 +298,62 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
repre["data"] = repre_data
repre.pop("_id", None)
# Prepare paths of source and destination files
if len(published_files) == 1:
src_to_dst_file_paths.append(
(published_files[0], template_filled)
)
else:
collections, remainders = clique.assemble(published_files)
            if remainders or not collections or len(collections) > 1:
                raise Exception((
                    "Integrity error. Files of published representation "
                    "are a mix of frame collections and single files. "
                    "Collections: `{}` Single files: `{}`"
                ).format(str(collections), str(remainders)))
src_col = collections[0]
# Get head and tail for collection
frame_splitter = "_-_FRAME_SPLIT_-_"
anatomy_data["frame"] = frame_splitter
_anatomy_filled = anatomy.format(anatomy_data)
_template_filled = _anatomy_filled["master"]["path"]
head, tail = _template_filled.split(frame_splitter)
padding = int(
anatomy.templates["render"].get(
"frame_padding",
anatomy.templates["render"].get("padding")
)
)
dst_col = clique.Collection(
head=head, padding=padding, tail=tail
)
dst_col.indexes.clear()
dst_col.indexes.update(src_col.indexes)
for src_file, dst_file in zip(src_col, dst_col):
src_to_dst_file_paths.append(
(src_file, dst_file)
)
# replace original file name with master name in repre doc
for index in range(len(repre.get("files"))):
file = repre.get("files")[index]
file_name = os.path.basename(file.get('path'))
for src_file, dst_file in src_to_dst_file_paths:
src_file_name = os.path.basename(src_file)
if src_file_name == file_name:
repre["files"][index]["path"] = self._update_path(
anatomy, repre["files"][index]["path"],
src_file, dst_file)
repre["files"][index]["hash"] = self._update_hash(
repre["files"][index]["hash"],
src_file_name, dst_file
)
schema.validate(repre)
repre_name_low = repre["name"].lower()
@@ -333,46 +389,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
InsertOne(repre)
)
# Prepare paths of source and destination files
if len(published_files) == 1:
src_to_dst_file_paths.append(
(published_files[0], template_filled)
)
continue
collections, remainders = clique.assemble(published_files)
if remainders or not collections or len(collections) > 1:
raise Exception((
"Integrity error. Files of published representation "
"is combination of frame collections and single files."
"Collections: `{}` Single files: `{}`"
).format(str(collections), str(remainders)))
src_col = collections[0]
# Get head and tail for collection
frame_splitter = "_-_FRAME_SPLIT_-_"
anatomy_data["frame"] = frame_splitter
_anatomy_filled = anatomy.format(anatomy_data)
_template_filled = _anatomy_filled["master"]["path"]
head, tail = _template_filled.split(frame_splitter)
padding = int(
anatomy.templates["render"].get(
"frame_padding",
anatomy.templates["render"].get("padding")
)
)
dst_col = clique.Collection(
head=head, padding=padding, tail=tail
)
dst_col.indexes.clear()
dst_col.indexes.update(src_col.indexes)
for src_file, dst_file in zip(src_col, dst_col):
src_to_dst_file_paths.append(
(src_file, dst_file)
)
self.path_checks = []
# Copy(hardlink) paths of source and destination files
@@ -533,3 +549,39 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
"type": "representation"
}))
return (master_version, master_repres)
def _update_path(self, anatomy, path, src_file, dst_file):
"""
            Replaces the source path with the new master path.
            'path' contains the original path with a version; it must be
            replaced with the 'master' path (with a 'master' label and
            without a version).
Args:
anatomy (Anatomy) - to get rootless style of path
path (string) - path from DB
src_file (string) - original file path
dst_file (string) - master file path
"""
        _, rootless = anatomy.find_root_template_from_path(dst_file)
        _, rtls_src = anatomy.find_root_template_from_path(src_file)
        return path.replace(rtls_src, rootless)
def _update_hash(self, hash, src_file_name, dst_file):
"""
Updates hash value with proper master name
"""
        src_file_name = self._get_name_without_ext(src_file_name)
        master_file_name = self._get_name_without_ext(dst_file)
        return hash.replace(src_file_name, master_file_name)
def _get_name_without_ext(self, value):
file_name = os.path.basename(value)
file_name, _ = os.path.splitext(file_name)
return file_name
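The head/tail trick used above is worth isolating: a sentinel string is formatted into the frame slot of the template, then split back out to recover the collection's head and tail. A standalone sketch under assumed values (the template path is invented; clique.Collection is the same API the plugin uses):

import clique

# Sentinel formatted into the frame slot of a hypothetical template.
frame_splitter = "_-_FRAME_SPLIT_-_"
template_filled = "/proj/asset/master/render." + frame_splitter + ".exr"

head, tail = template_filled.split(frame_splitter)
dst_col = clique.Collection(head=head, padding=4, tail=tail)
dst_col.indexes.update([1001, 1002, 1003])

for path in dst_col:
    print(path)  # /proj/asset/master/render.1001.exr, .1002, .1003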


@@ -180,7 +180,7 @@
}
},
"sync_server": {
"enabled": false,
"enabled": true,
"config": {
"local_id": "local_0",
"retry_cnt": "3",
@@ -192,7 +192,23 @@
"gdrive": {
"provider": "gdrive",
"credentials_url": "",
"root": "/sync_testing/test"
"root": {
"work": ""
}
},
"studio": {
"provider": "local_drive",
"credentials_url": "",
"root": {
"work": ""
}
},
"local_0": {
"provider": "local_drive",
"credentials_url": "",
"root": {
"work": ""
}
}
}
}
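Each site's "provider" key selects a handler registered with the ProviderFactory earlier in this commit ('gdrive' -> GDriveHandler, 'local_drive' -> LocalDriveHandler). A sketch of that lookup with a hypothetical resolver, since the factory's getter is not visible in this diff:

# Hypothetical resolution step, not the factory's real getter.
REGISTERED = {"gdrive": GDriveHandler, "local_drive": LocalDriveHandler}

def handler_for(site_name, sites):
    provider_code = sites[site_name]["provider"]
    return REGISTERED[provider_code](site_name)

# e.g. handler_for("local_0", sites) returns LocalDriveHandler("local_0")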


@@ -66,10 +66,14 @@
"label": "Credentials url"
},
            {
-                "type": "text",
+                "type": "dict-modifiable",
                "key": "root",
-                "label": "Root"
-            }]
+                "label": "Roots",
+                "collapsable": false,
+                "collapsable_key": false,
+                "object_type": "text"
+            }
+        ]
}
}
]