mirror of https://github.com/ynput/ayon-core.git — synced 2026-01-01 08:24:53 +01:00

Merge branch 'develop' into enhancement/OP-2858_move-AvalonMongoDB-logic

This commit is contained in: commit a579cd31c8

116 changed files with 3112 additions and 1204 deletions
@@ -285,6 +285,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        else:
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        if instance.data.get("suspend_publish"):
            payload["JobInfo"]["InitialStatus"] = "Suspended"

        index = 0
        for key in environment:
            if key.upper() in self.enviro_filter:
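Note: the keys touched in the hunk above live in the Deadline "JobInfo" section of the submission payload. A minimal, hedged sketch of such a payload follows; all values and every key other than JobDependency0 and InitialStatus are illustrative assumptions, not taken from this commit.

    # Illustrative Deadline submission payload; values are made up.
    payload = {
        "JobInfo": {
            "Plugin": "OpenPype",
            "BatchName": "shot010_publish",
            # make this job wait for the render job to finish first
            "JobDependency0": "62b9f0c1e4b0a1d2c3e4f567",
            # submit in a paused state when the instance asks for suspended publishing
            "InitialStatus": "Suspended",
        },
        "PluginInfo": {},
    }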
@@ -1,6 +1,8 @@
import os
from openpype_modules.ftrack.lib import BaseAction, statics_icon
import collections
import copy
from openpype.api import Anatomy
from openpype_modules.ftrack.lib import BaseAction, statics_icon


class CreateFolders(BaseAction):
@@ -9,55 +11,59 @@ class CreateFolders(BaseAction):
    icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg")

    def discover(self, session, entities, event):
        if len(entities) != 1:
            return False

        not_allowed = ["assetversion", "project"]
        if entities[0].entity_type.lower() in not_allowed:
            return False

        return True
        for entity_item in event["data"]["selection"]:
            if entity_item.get("entityType").lower() in ("task", "show"):
                return True
        return False

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return
        entity = entities[0]
        without_interface = True
        for child in entity["children"]:
            if child["object_type"]["name"].lower() != "task":
                without_interface = False

        with_interface = False
        for entity in entities:
            if entity.entity_type.lower() != "task":
                with_interface = True
                break
        self.without_interface = without_interface
        if without_interface:

        if "values" not in event["data"]:
            event["data"]["values"] = {}

        event["data"]["values"]["with_interface"] = with_interface
        if not with_interface:
            return

        title = "Create folders"

        entity_name = entity["name"]
        msg = (
            "<h2>Do you want create folders also"
            " for all children of \"{}\"?</h2>"
            " for all children of your selection?</h2>"
        )
        if entity.entity_type.lower() == "project":
            entity_name = entity["full_name"]
            msg = msg.replace(" also", "")
            msg += "<h3>(Project root won't be created if not checked)</h3>"
        items = []
        item_msg = {
            "type": "label",
            "value": msg.format(entity_name)
        }
        item_label = {
            "type": "label",
            "value": "With all chilren entities"
        }
        item = {
            "name": "children_included",
            "type": "boolean",
            "value": False
        }
        items.append(item_msg)
        items.append(item_label)
        items.append(item)
        items = [
            {
                "type": "label",
                "value": msg.format(entity_name)
            },
            {
                "type": "label",
                "value": "With all chilren entities"
            },
            {
                "name": "children_included",
                "type": "boolean",
                "value": False
            },
            {
                "type": "hidden",
                "name": "with_interface",
                "value": with_interface
            }
        ]

        return {
            "items": items,
@@ -66,30 +72,47 @@ class CreateFolders(BaseAction):

    def launch(self, session, entities, event):
        '''Callback method for custom action.'''

        if "values" not in event["data"]:
            return

        with_interface = event["data"]["values"]["with_interface"]
        with_childrens = True
        if self.without_interface is False:
        if "values" not in event["data"]:
            return
        if with_interface:
            with_childrens = event["data"]["values"]["children_included"]

        entity = entities[0]
        if entity.entity_type.lower() == "project":
            proj = entity
        else:
            proj = entity["project"]
        project_name = proj["full_name"]
        project_code = proj["name"]
        filtered_entities = []
        for entity in entities:
            low_context_type = entity["context_type"].lower()
            if low_context_type in ("task", "show"):
                if not with_childrens and low_context_type == "show":
                    continue
                filtered_entities.append(entity)

        if entity.entity_type.lower() == 'project' and with_childrens is False:
        if not filtered_entities:
            return {
                'success': True,
                'message': 'Nothing was created'
                "success": True,
                "message": 'Nothing was created'
            }

        all_entities = []
        all_entities.append(entity)
        if with_childrens:
            all_entities = self.get_notask_children(entity)
        project_entity = self.get_project_from_entity(filtered_entities[0])

        project_name = project_entity["full_name"]
        project_code = project_entity["name"]

        task_entities = []
        other_entities = []
        self.get_all_entities(
            session, entities, task_entities, other_entities
        )
        hierarchy = self.get_entities_hierarchy(
            session, task_entities, other_entities
        )
        task_types = session.query("select id, name from Type").all()
        task_type_names_by_id = {
            task_type["id"]: task_type["name"]
            for task_type in task_types
        }

        anatomy = Anatomy(project_name)

@@ -97,77 +120,67 @@ class CreateFolders(BaseAction):
        work_template = anatomy.templates
        for key in work_keys:
            work_template = work_template[key]
        work_has_apps = "{app" in work_template

        publish_keys = ["publish", "folder"]
        publish_template = anatomy.templates
        for key in publish_keys:
            publish_template = publish_template[key]
        publish_has_apps = "{app" in publish_template

        project_data = {
            "project": {
                "name": project_name,
                "code": project_code
            }
        }

        collected_paths = []
        for entity in all_entities:
            if entity.entity_type.lower() == "project":
                continue
            ent_data = {
                "project": {
                    "name": project_name,
                    "code": project_code
                }
            }
        for item in hierarchy:
            parent_entity, task_entities = item

            ent_data["asset"] = entity["name"]
            parent_data = copy.deepcopy(project_data)

            parents = entity["link"][1:-1]
            parents = parent_entity["link"][1:-1]
            hierarchy_names = [p["name"] for p in parents]
            hierarchy = ""
            hierarchy = "/".join(hierarchy_names)

            if hierarchy_names:
                hierarchy = os.path.sep.join(hierarchy_names)
                ent_data["hierarchy"] = hierarchy
                parent_name = hierarchy_names[-1]
            else:
                parent_name = project_name

            tasks_created = False
            for child in entity["children"]:
                if child["object_type"]["name"].lower() != "task":
                    continue
                tasks_created = True
                task_data = ent_data.copy()
                task_data["task"] = child["name"]
            parent_data.update({
                "asset": parent_entity["name"],
                "hierarchy": hierarchy,
                "parent": parent_name
            })

                apps = []

                # Template wok
                if work_has_apps:
                    app_data = task_data.copy()
                    for app in apps:
                        app_data["app"] = app
                        collected_paths.append(self.compute_template(
                            anatomy, app_data, work_keys
                        ))
                else:
                    collected_paths.append(self.compute_template(
                        anatomy, task_data, work_keys
                    ))

                # Template publish
                if publish_has_apps:
                    app_data = task_data.copy()
                    for app in apps:
                        app_data["app"] = app
                        collected_paths.append(self.compute_template(
                            anatomy, app_data, publish_keys
                        ))
                else:
                    collected_paths.append(self.compute_template(
                        anatomy, task_data, publish_keys
                    ))

            if not tasks_created:
            if not task_entities:
                # create path for entity
                collected_paths.append(self.compute_template(
                    anatomy, ent_data, work_keys
                    anatomy, parent_data, work_keys
                ))
                collected_paths.append(self.compute_template(
                    anatomy, ent_data, publish_keys
                    anatomy, parent_data, publish_keys
                ))
                continue

            for task_entity in task_entities:
                task_type_id = task_entity["type_id"]
                task_type_name = task_type_names_by_id[task_type_id]
                task_data = copy.deepcopy(parent_data)
                task_data["task"] = {
                    "name": task_entity["name"],
                    "type": task_type_name
                }

                # Template wok
                collected_paths.append(self.compute_template(
                    anatomy, task_data, work_keys
                ))

                # Template publish
                collected_paths.append(self.compute_template(
                    anatomy, task_data, publish_keys
                ))

        if len(collected_paths) == 0:
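Note: the parent_data/task_data dictionaries built above are what compute_template formats into the anatomy work and publish templates. A hedged sketch of the fill-data shape follows; the concrete values are made up, the keys come from the hunk above, and the exact contents of work_keys are not shown in this hunk (["work", "folder"] is assumed by analogy with publish_keys).

    # Illustrative anatomy fill data; names are made up.
    task_data = {
        "project": {"name": "MyProject", "code": "myproj"},
        "asset": "sh010",
        "hierarchy": "episodes/ep01",
        "parent": "ep01",
        "task": {"name": "compositing", "type": "Compositing"},
    }
    # compute_template(anatomy, task_data, ["work", "folder"]) would then
    # resolve the work folder template with this data and return the path.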
@@ -188,14 +201,65 @@ class CreateFolders(BaseAction):
                "message": "Successfully created project folders."
            }

    def get_notask_children(self, entity):
    def get_all_entities(
        self, session, entities, task_entities, other_entities
    ):
        if not entities:
            return

        no_task_entities = []
        for entity in entities:
            if entity.entity_type.lower() == "task":
                task_entities.append(entity)
            else:
                no_task_entities.append(entity)

        if not no_task_entities:
            return task_entities

        other_entities.extend(no_task_entities)

        no_task_entity_ids = [entity["id"] for entity in no_task_entities]
        next_entities = session.query((
            "select id, parent_id"
            " from TypedContext where parent_id in ({})"
        ).format(self.join_query_keys(no_task_entity_ids))).all()

        self.get_all_entities(
            session, next_entities, task_entities, other_entities
        )

    def get_entities_hierarchy(self, session, task_entities, other_entities):
        task_entity_ids = [entity["id"] for entity in task_entities]
        full_task_entities = session.query((
            "select id, name, type_id, parent_id"
            " from TypedContext where id in ({})"
        ).format(self.join_query_keys(task_entity_ids)))
        task_entities_by_parent_id = collections.defaultdict(list)
        for entity in full_task_entities:
            parent_id = entity["parent_id"]
            task_entities_by_parent_id[parent_id].append(entity)

        output = []
        if entity.entity_type.lower() == "task":
        if not task_entities_by_parent_id:
            return output

        output.append(entity)
        for child in entity["children"]:
            output.extend(self.get_notask_children(child))
        other_ids = set()
        for entity in other_entities:
            other_ids.add(entity["id"])
        other_ids |= set(task_entities_by_parent_id.keys())

        parent_entities = session.query((
            "select id, name from TypedContext where id in ({})"
        ).format(self.join_query_keys(other_ids))).all()

        for parent_entity in parent_entities:
            parent_id = parent_entity["id"]
            output.append((
                parent_entity,
                task_entities_by_parent_id[parent_id]
            ))

        return output

    def compute_template(self, anatomy, data, anatomy_keys):
@@ -1,6 +1,4 @@
import os
import re
import json

from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import get_project_basic_paths, create_project_folders
@@ -31,10 +31,13 @@ TOPIC_STATUS_SERVER = "openpype.event.server.status"
TOPIC_STATUS_SERVER_RESULT = "openpype.event.server.status.result"


def check_ftrack_url(url, log_errors=True):
def check_ftrack_url(url, log_errors=True, logger=None):
    """Checks if Ftrack server is responding"""
    if logger is None:
        logger = Logger.get_logger(__name__)

    if not url:
        print('ERROR: Ftrack URL is not set!')
        logger.error("Ftrack URL is not set!")
        return None

    url = url.strip('/ ')
@@ -48,15 +51,15 @@ def check_ftrack_url(url, log_errors=True):
        result = requests.get(url, allow_redirects=False)
    except requests.exceptions.RequestException:
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accesible!')
            logger.error("Entered Ftrack URL is not accesible!")
        return False

    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
        if log_errors:
            print('ERROR: Entered Ftrack URL is not accesible!')
            logger.error("Entered Ftrack URL is not accesible!")
        return False

    print('DEBUG: Ftrack server {} is accessible.'.format(url))
    logger.debug("Ftrack server {} is accessible.".format(url))

    return url

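Note: the new optional logger argument lets callers route these messages into their own logging setup instead of the default module logger. A minimal usage sketch under assumptions follows; the import path is assumed, not confirmed by this diff, and only the function name, parameters, and return behaviour come from the hunks above.

    import logging

    # hypothetical import path; adjust to wherever check_ftrack_url lives in your checkout
    from openpype_modules.ftrack.lib import check_ftrack_url

    my_logger = logging.getLogger("ftrack.url_check")
    url = check_ftrack_url("https://mystudio.ftrackapp.com", logger=my_logger)
    if url:
        # a truthy return is the cleaned URL of a server that answered with the FTRACK_VERSION header
        print("Ftrack reachable at", url)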
@@ -133,7 +136,7 @@ class ProcessEventHub(SocketBaseEventHub):
    hearbeat_msg = b"processor"

    is_collection_created = False
    pypelog = Logger().get_logger("Session Processor")
    pypelog = Logger.get_logger("Session Processor")

    def __init__(self, *args, **kwargs):
        self.mongo_url = None
@@ -119,7 +119,7 @@ class OpenPypeContextSelector:
        # app names and versions, but since app_name is not used
        # currently down the line (but it is required by OP publish command
        # right now).
        self.context["app_name"] = "maya/2020"
        # self.context["app_name"] = "maya/2022"
        return True

    @staticmethod
@@ -139,7 +139,8 @@ class OpenPypeContextSelector:
        env = {"AVALON_PROJECT": str(self.context.get("project")),
               "AVALON_ASSET": str(self.context.get("asset")),
               "AVALON_TASK": str(self.context.get("task")),
               "AVALON_APP_NAME": str(self.context.get("app_name"))}
               # "AVALON_APP_NAME": str(self.context.get("app_name"))
               }

        print(">>> setting environment:")
        for k, v in env.items():
@@ -184,7 +185,7 @@ selector = OpenPypeContextSelector()
selector.context["project"] = os.getenv("AVALON_PROJECT")
selector.context["asset"] = os.getenv("AVALON_ASSET")
selector.context["task"] = os.getenv("AVALON_TASK")
selector.context["app_name"] = os.getenv("AVALON_APP_NAME")
# selector.context["app_name"] = os.getenv("AVALON_APP_NAME")

# if anything inside is None, scratch the whole thing and
# ask user for context.
@@ -4,7 +4,7 @@ from datetime import datetime
import threading
import platform
import copy
from collections import deque
from collections import deque, defaultdict


from openpype.modules import OpenPypeModule
@@ -25,7 +25,7 @@ from openpype.settings.lib import (
from .providers.local_drive import LocalDriveHandler
from .providers import lib

from .utils import time_function, SyncStatus
from .utils import time_function, SyncStatus, SiteAlreadyPresentError


log = PypeLogger().get_logger("SyncServer")
@@ -133,21 +133,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
    def add_site(self, collection, representation_id, site_name=None,
                 force=False):
        """
        Adds new site to representation to be synced.
        Adds new site to representation to be synced.

        'collection' must have synchronization enabled (globally or
        project only)
        'collection' must have synchronization enabled (globally or
        project only)

        Used as a API endpoint from outside applications (Loader etc)
        Used as a API endpoint from outside applications (Loader etc).

        Args:
            collection (string): project name (must match DB)
            representation_id (string): MongoDB _id value
            site_name (string): name of configured and active site
            force (bool): reset site if exists
        Use 'force' to reset existing site.

        Returns:
            throws ValueError if any issue
        Args:
            collection (string): project name (must match DB)
            representation_id (string): MongoDB _id value
            site_name (string): name of configured and active site
            force (bool): reset site if exists

        Throws:
            SiteAlreadyPresentError - if adding already existing site and
                not 'force'
            ValueError - other errors (repre not found, misconfiguration)
        """
        if not self.get_sync_project_setting(collection):
            raise ValueError("Project not configured")
@@ -157,9 +161,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

        self.reset_site_on_representation(collection,
                                          representation_id,
                                          site_name=site_name, force=force)
                                          site_name=site_name,
                                          force=force)

    # public facing API
    def remove_site(self, collection, representation_id, site_name,
                    remove_local_files=False):
        """
@@ -186,6 +190,151 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        if remove_local_files:
            self._remove_local_file(collection, representation_id, site_name)

    def compute_resource_sync_sites(self, project_name):
        """Get available resource sync sites state for publish process.

        Returns dict with prepared state of sync sites for 'project_name'.
        It checks if Site Sync is enabled, handles alternative sites.
        Publish process stores this dictionary as a part of representation
        document in DB.

        Example:
        [
            {
                'name': '42abbc09-d62a-44a4-815c-a12cd679d2d7',
                'created_dt': datetime.datetime(2022, 3, 30, 12, 16, 9, 778637)
            },
            {'name': 'studio'},
            {'name': 'SFTP'}
        ] -- representation is published locally, artist or Settings have set
        remote site as 'studio'. 'SFTP' is alternate site to 'studio'. Eg.
        whenever file is on 'studio', it is also on 'SFTP'.
        """

        def create_metadata(name, created=True):
            """Create sync site metadata for site with `name`"""
            metadata = {"name": name}
            if created:
                metadata["created_dt"] = datetime.now()
            return metadata

        if (
            not self.sync_system_settings["enabled"] or
            not self.sync_project_settings[project_name]["enabled"]):
            return [create_metadata(self.DEFAULT_SITE)]

        local_site = self.get_active_site(project_name)
        remote_site = self.get_remote_site(project_name)

        # Attached sites metadata by site name
        # That is the local site, remote site, the always accesible sites
        # and their alternate sites (alias of sites with different protocol)
        attached_sites = dict()
        attached_sites[local_site] = create_metadata(local_site)

        if remote_site and remote_site not in attached_sites:
            attached_sites[remote_site] = create_metadata(remote_site,
                                                          created=False)

        attached_sites = self._add_alternative_sites(attached_sites)
        # add skeleton for sites where it should be always synced to
        # usually it would be a backup site which is handled by separate
        # background process
        for site in self._get_always_accessible_sites(project_name):
            if site not in attached_sites:
                attached_sites[site] = create_metadata(site, created=False)

        return list(attached_sites.values())

    def _get_always_accessible_sites(self, project_name):
        """Sites that synced to as a part of background process.

        Artist machine doesn't handle those, explicit Tray with that site name
        as a local id must be running.
        Example is dropbox site serving as a backup solution
        """
        always_accessible_sites = (
            self.get_sync_project_setting(project_name)["config"].
            get("always_accessible_on", [])
        )
        return [site.strip() for site in always_accessible_sites]

    def _add_alternative_sites(self, attached_sites):
        """Add skeleton document for alternative sites

        Each new configured site in System Setting could serve as a alternative
        site, it's a kind of alias. It means that files on 'a site' are
        physically accessible also on 'a alternative' site.
        Example is sftp site serving studio files via sftp protocol, physically
        file is only in studio, sftp server has this location mounted.
        """
        additional_sites = self.sync_system_settings.get("sites", {})

        alt_site_pairs = self._get_alt_site_pairs(additional_sites)

        for site_name in additional_sites.keys():
            # Get alternate sites (stripped names) for this site name
            alt_sites = alt_site_pairs.get(site_name)
            alt_sites = [site.strip() for site in alt_sites]
            alt_sites = set(alt_sites)

            # If no alternative sites we don't need to add
            if not alt_sites:
                continue

            # Take a copy of data of the first alternate site that is already
            # defined as an attached site to match the same state.
            match_meta = next((attached_sites[site] for site in alt_sites
                               if site in attached_sites), None)
            if not match_meta:
                continue

            alt_site_meta = copy.deepcopy(match_meta)
            alt_site_meta["name"] = site_name

            # Note: We change mutable `attached_site` dict in-place
            attached_sites[site_name] = alt_site_meta

        return attached_sites

    def _get_alt_site_pairs(self, conf_sites):
        """Returns dict of site and its alternative sites.

        If `site` has alternative site, it means that alt_site has 'site' as
        alternative site
        Args:
            conf_sites (dict)
        Returns:
            (dict): {'site': [alternative sites]...}
        """
        alt_site_pairs = defaultdict(set)
        for site_name, site_info in conf_sites.items():
            alt_sites = set(site_info.get("alternative_sites", []))
            alt_site_pairs[site_name].update(alt_sites)

            for alt_site in alt_sites:
                alt_site_pairs[alt_site].add(site_name)

        for site_name, alt_sites in alt_site_pairs.items():
            sites_queue = deque(alt_sites)
            while sites_queue:
                alt_site = sites_queue.popleft()

                # safety against wrong config
                # {"SFTP": {"alternative_site": "SFTP"}
                if alt_site == site_name or alt_site not in alt_site_pairs:
                    continue

                for alt_alt_site in alt_site_pairs[alt_site]:
                    if (
                        alt_alt_site != site_name
                        and alt_alt_site not in alt_sites
                    ):
                        alt_sites.add(alt_alt_site)
                        sites_queue.append(alt_alt_site)

        return alt_site_pairs

    def clear_project(self, collection, site_name):
        """
        Clear 'collection' of 'site_name' and its local files
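Note: to illustrate what the new _get_alt_site_pairs logic produces, here is a small standalone sketch of the same transitive alias expansion run over a sample "sites" configuration. The site names and the config shape are illustrative assumptions; the expansion logic mirrors the hunk above.

    from collections import defaultdict, deque

    # sample System Settings "sites" section (illustrative)
    conf_sites = {
        "sftp": {"alternative_sites": ["studio"]},
        "studio": {},
        "backup": {"alternative_sites": ["studio"]},
    }

    # first pass: record each declared alias in both directions
    alt_site_pairs = defaultdict(set)
    for site_name, site_info in conf_sites.items():
        alt_sites = set(site_info.get("alternative_sites", []))
        alt_site_pairs[site_name].update(alt_sites)
        for alt_site in alt_sites:
            alt_site_pairs[alt_site].add(site_name)

    # second pass: transitive closure, so sites reachable through a chain of aliases are included
    for site_name, alt_sites in alt_site_pairs.items():
        queue = deque(alt_sites)
        while queue:
            alt_site = queue.popleft()
            if alt_site == site_name or alt_site not in alt_site_pairs:
                continue
            for alt_alt_site in alt_site_pairs[alt_site]:
                if alt_alt_site != site_name and alt_alt_site not in alt_sites:
                    alt_sites.add(alt_alt_site)
                    queue.append(alt_alt_site)

    print(dict(alt_site_pairs))
    # every site ends up paired with both others, e.g.
    # {'sftp': {'studio', 'backup'}, 'studio': {'sftp', 'backup'}, 'backup': {'studio', 'sftp'}}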
@@ -209,36 +358,38 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
    def create_validate_project_task(self, collection, site_name):
        """Adds metadata about project files validation on a queue.

        This process will loop through all representation and check if
        their files actually exist on an active site.
        This process will loop through all representation and check if
        their files actually exist on an active site.

        This might be useful for edge cases when artists is switching
        between sites, remote site is actually physically mounted and
        active site has same file urls etc.
        It also checks if site is set in DB, but file is physically not
        present

        Task will run on a asyncio loop, shouldn't be blocking.
        This might be useful for edge cases when artists is switching
        between sites, remote site is actually physically mounted and
        active site has same file urls etc.

        Task will run on a asyncio loop, shouldn't be blocking.
        """
        task = {
            "type": "validate",
            "project_name": collection,
            "func": lambda: self.validate_project(collection, site_name)
            "func": lambda: self.validate_project(collection, site_name,
                                                  reset_missing=True)
        }
        self.projects_processed.add(collection)
        self.long_running_tasks.append(task)

    def validate_project(self, collection, site_name, remove_missing=False):
        """
        Validate 'collection' of 'site_name' and its local files
    def validate_project(self, collection, site_name, reset_missing=False):
        """Validate 'collection' of 'site_name' and its local files

        If file present and not marked with a 'site_name' in DB, DB is
        updated with site name and file modified date.
        If file present and not marked with a 'site_name' in DB, DB is
        updated with site name and file modified date.

        Args:
            module (SyncServerModule)
            collection (string): project name
            site_name (string): active site name
            remove_missing (bool): if True remove sites in DB if missing
                physically
        Args:
            collection (string): project name
            site_name (string): active site name
            reset_missing (bool): if True reset site in DB if missing
                physically
        """
        self.log.debug("Validation of {} for {} started".format(collection,
                                                                site_name))
@@ -253,29 +404,32 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            return

        sites_added = 0
        sites_removed = 0
        sites_reset = 0
        for repre in representations:
            repre_id = repre["_id"]
            for repre_file in repre.get("files", []):
                try:
                    has_site = site_name in [site["name"]
                                             for site in repre_file["sites"]]
                except TypeError:
                    is_on_site = site_name in [site["name"]
                                               for site in repre_file["sites"]
                                               if (site.get("created_dt") and
                                                   not site.get("error"))]
                except (TypeError, AttributeError):
                    self.log.debug("Structure error in {}".format(repre_id))
                    continue

                if has_site and not remove_missing:
                    continue

                file_path = repre_file.get("path", "")
                local_file_path = self.get_local_file_path(collection,
                                                           site_name,
                                                           file_path)

                if local_file_path and os.path.exists(local_file_path):
                    self.log.debug("Adding site {} for {}".format(site_name,
                                                                  repre_id))
                    if not has_site:
                file_exists = (local_file_path and
                               os.path.exists(local_file_path))
                if not is_on_site:
                    if file_exists:
                        self.log.debug(
                            "Adding site {} for {}".format(site_name,
                                                           repre_id))

                        query = {
                            "_id": repre_id
                        }
@@ -283,27 +437,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                            os.path.getmtime(local_file_path))
                        elem = {"name": site_name,
                                "created_dt": created_dt}
                        self._add_site(collection, query, [repre], elem,
                        self._add_site(collection, query, repre, elem,
                                       site_name=site_name,
                                       file_id=repre_file["_id"])
                                       file_id=repre_file["_id"],
                                       force=True)
                        sites_added += 1
                else:
                    if has_site and remove_missing:
                        self.log.debug("Removing site {} for {}".
                    if not file_exists and reset_missing:
                        self.log.debug("Resetting site {} for {}".
                                       format(site_name, repre_id))
                        self.reset_provider_for_file(collection,
                                                     repre_id,
                                                     file_id=repre_file["_id"],
                                                     remove=True)
                        sites_removed += 1
                        self.reset_site_on_representation(
                            collection, repre_id, site_name=site_name,
                            file_id=repre_file["_id"])
                        sites_reset += 1

        if sites_added % 100 == 0:
            self.log.debug("Sites added {}".format(sites_added))

        self.log.debug("Validation of {} for {} ended".format(collection,
                                                              site_name))
        self.log.info("Sites added {}, sites removed {}".format(sites_added,
                                                                sites_removed))
        self.log.info("Sites added {}, sites reset {}".format(sites_added,
                                                              reset_missing))

    def pause_representation(self, collection, representation_id, site_name):
        """
@@ -821,7 +975,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            self.log.debug("Adding alternate {} to {}".format(
                alt_site, representation["_id"]))
            self._add_site(collection, query,
                           [representation], elem,
                           representation, elem,
                           alt_site, file_id=file_id, force=True)

    """ End of Public API """
@@ -1425,14 +1579,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            pause (bool or None): if True - pause, False - unpause
            force (bool): hard reset - currently only for add_site

        Returns:
            throws ValueError
        Raises:
            SiteAlreadyPresentError - if adding already existing site and
                not 'force'
            ValueError - other errors (repre not found, misconfiguration)
        """
        query = {
            "_id": ObjectId(representation_id)
        }

        representation = list(self.connection.database[collection].find(query))
        representation = self.connection.database[collection].find_one(query)
        if not representation:
            raise ValueError("Representation {} not found in {}".
                             format(representation_id, collection))
@@ -1463,7 +1619,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                representation, site_name, pause)
        else:  # add new site to all files for representation
            self._add_site(collection, query, representation, elem, site_name,
                           force)
                           force=force)

    def _update_site(self, collection, query, update, arr_filter):
        """
@@ -1518,7 +1674,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        Throws ValueError if 'site_name' not found on 'representation'
        """
        found = False
        for repre_file in representation.pop().get("files"):
        for repre_file in representation.get("files"):
            for site in repre_file.get("sites"):
                if site.get("name") == site_name:
                    found = True
@@ -1544,7 +1700,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        """
        found = False
        site = None
        for repre_file in representation.pop().get("files"):
        for repre_file in representation.get("files"):
            for site in repre_file.get("sites"):
                if site["name"] == site_name:
                    found = True
@@ -1576,29 +1732,34 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
        Adds 'site_name' to 'representation' on 'collection'

        Args:
            representation (list of 1 dict)
            representation (dict)
            file_id (ObjectId)

        Use 'force' to remove existing or raises ValueError
        """
        reseted_existing = False
        for repre_file in representation.pop().get("files"):
        reset_existing = False
        files = representation.get("files", [])
        if not files:
            log.debug("No files for {}".format(representation["_id"]))
            return

        for repre_file in files:
            if file_id and file_id != repre_file["_id"]:
                continue

            for site in repre_file.get("sites"):
                if site["name"] == site_name:
                    if force:
                    if force or site.get("error"):
                        self._reset_site_for_file(collection, query,
                                                  elem, repre_file["_id"],
                                                  site_name)
                        reseted_existing = True
                        reset_existing = True
                    else:
                        msg = "Site {} already present".format(site_name)
                        log.info(msg)
                        raise ValueError(msg)
                        raise SiteAlreadyPresentError(msg)

        if reseted_existing:
        if reset_existing:
            return

        if not file_id:
@@ -1762,7 +1923,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
            (int) - number of failed attempts
        """
        _, rec = self._get_site_rec(file.get("sites", []), provider)
        return rec.get("tries", 0)
        return self._get_tries_count_from_rec(rec)

    def _get_progress_dict(self, progress):
        """
@@ -8,6 +8,11 @@ class ResumableError(Exception):
    pass


class SiteAlreadyPresentError(Exception):
    """Representation has already site skeleton present."""
    pass


class SyncStatus:
    DO_NOTHING = 0
    DO_UPLOAD = 1
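Note: a hedged sketch of how a caller such as a loader tool might use the new exception around add_site. The import path and the way the SyncServerModule instance is obtained are assumptions; only add_site, the force flag, and SiteAlreadyPresentError come from the hunks above.

    # Hypothetical caller-side handling of the new exception type.
    # `sync_server` stands in for an already-obtained SyncServerModule instance.
    from openpype_modules.sync_server.utils import SiteAlreadyPresentError  # path assumed from the diff

    def ensure_site(sync_server, project_name, representation_id, site_name):
        try:
            sync_server.add_site(project_name, representation_id, site_name)
        except SiteAlreadyPresentError:
            # the site skeleton already exists; re-add only when a reset is actually wanted
            sync_server.add_site(
                project_name, representation_id, site_name, force=True
            )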