diff --git a/pype/ftrack/actions/action_create_cust_attrs.py b/pype/ftrack/actions/action_create_cust_attrs.py
index e2bd753b0e..5279a95a20 100644
--- a/pype/ftrack/actions/action_create_cust_attrs.py
+++ b/pype/ftrack/actions/action_create_cust_attrs.py
@@ -5,7 +5,8 @@ import json
import arrow
import logging
import ftrack_api
-from pype.ftrack import BaseAction, get_ca_mongoid
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pypeapp import config
from ftrack_api.exception import NoResultFoundError
@@ -171,7 +172,6 @@ class CustomAttributes(BaseAction):
def avalon_mongo_id_attributes(self, session):
# Attribute Name and Label
- cust_attr_name = get_ca_mongoid()
cust_attr_label = 'Avalon/Mongo Id'
# Types that don't need object_type_id
@@ -207,7 +207,7 @@ class CustomAttributes(BaseAction):
group = self.get_group('avalon')
data = {}
- data['key'] = cust_attr_name
+ data['key'] = CustAttrIdKey
data['label'] = cust_attr_label
data['type'] = custom_attribute_type
data['default'] = ''
diff --git a/pype/ftrack/actions/action_create_project_structure.py b/pype/ftrack/actions/action_create_project_structure.py
index c99c2df1e6..4589802f3a 100644
--- a/pype/ftrack/actions/action_create_project_structure.py
+++ b/pype/ftrack/actions/action_create_project_structure.py
@@ -142,6 +142,13 @@ class CreateProjectFolders(BaseAction):
else:
data['project_id'] = parent['project']['id']
+ existing_entity = self.session.query((
+ "TypedContext where name is \"{}\" and "
+ "parent_id is \"{}\" and project_id is \"{}\""
+ ).format(name, data['parent_id'], data['project_id'])).first()
+ if existing_entity:
+ return existing_entity
+
new_ent = self.session.create(ent_type, data)
self.session.commit()
return new_ent
diff --git a/pype/ftrack/actions/action_prepare_project.py b/pype/ftrack/actions/action_prepare_project.py
index 75bcf98cb3..4cc6cfd8df 100644
--- a/pype/ftrack/actions/action_prepare_project.py
+++ b/pype/ftrack/actions/action_prepare_project.py
@@ -2,12 +2,9 @@ import os
import json
from ruamel import yaml
-import ftrack_api
from pype.ftrack import BaseAction
from pypeapp import config
-from pype.ftrack.lib import get_avalon_attr
-
-from ftrack_api import session as fa_session
+from pype.ftrack.lib.avalon_sync import get_avalon_attr
class PrepareProject(BaseAction):
@@ -55,6 +52,8 @@ class PrepareProject(BaseAction):
attributes_to_set = {}
for attr in hier_cust_attrs:
key = attr["key"]
+ if key.startswith("avalon_"):
+ continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
@@ -65,6 +64,8 @@ class PrepareProject(BaseAction):
if attr["entity_type"].lower() != "show":
continue
key = attr["key"]
+ if key.startswith("avalon_"):
+ continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
diff --git a/pype/ftrack/actions/action_sync_to_avalon.py b/pype/ftrack/actions/action_sync_to_avalon.py
index 3ddcc1c794..01d0b866bf 100644
--- a/pype/ftrack/actions/action_sync_to_avalon.py
+++ b/pype/ftrack/actions/action_sync_to_avalon.py
@@ -1,2153 +1,9 @@
import os
-import collections
-import re
-import queue
import time
-import toml
import traceback
-from bson.objectid import ObjectId
-from bson.errors import InvalidId
-from pymongo import UpdateOne
-
-import avalon
from pype.ftrack import BaseAction
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-import ftrack_api
-from ftrack_api import session as fa_session
-from pypeapp import Anatomy
-
-
-class SyncEntitiesFactory:
- dbcon = DbConnector()
-
- project_query = (
- "select full_name, name, custom_attributes"
- ", project_schema._task_type_schema.types.name"
- " from Project where full_name is \"{}\""
- )
- entities_query = (
- "select id, name, parent_id, link"
- " from TypedContext where project_id is \"{}\""
- )
- ignore_custom_attr_key = "avalon_ignore_sync"
- id_cust_attr = "avalon_mongo_id"
-
- entity_schemas = {
- "project": "avalon-core:project-2.0",
- "asset": "avalon-core:asset-3.0",
- "config": "avalon-core:config-1.0"
- }
-
- report_splitter = {"type": "label", "value": "---"}
-
- def __init__(self, log_obj, _session, project_full_name):
- self.log = log_obj
- self.session = ftrack_api.Session(
- server_url=_session.server_url,
- api_key=_session.api_key,
- api_user=_session.api_user,
- auto_connect_event_hub=True
- )
-
- self.cancel_auto_sync = False
-
- self.schema_patterns = {}
- self.duplicates = {}
- self.failed_regex = {}
- self.tasks_failed_regex = collections.defaultdict(list)
- self.report_items = {
- "info": collections.defaultdict(list),
- "warning": collections.defaultdict(list),
- "error": collections.defaultdict(list)
- }
-
- self.create_list = []
- self.recreated_ftrack_ents = {}
- self.updates = collections.defaultdict(dict)
-
- self._avalon_ents_by_id = None
- self._avalon_ents_by_ftrack_id = None
- self._avalon_ents_by_name = None
- self._avalon_ents_by_parent_id = None
-
- self._avalon_archived_ents = None
- self._avalon_archived_by_id = None
- self._avalon_archived_by_parent_id = None
- self._avalon_archived_by_name = None
-
- self._subsets_by_parent_id = None
- self._changeability_by_mongo_id = None
-
- self.all_filtered_entities = {}
- # self.all_filtered_ids = []
- self.filtered_ids = []
- self.not_selected_ids = []
-
- self._ent_pats_by_ftrack_id = {}
-
- # Get Ftrack project
- ft_project = self.session.query(
- self.project_query.format(project_full_name)
- ).one()
- ft_project_id = ft_project["id"]
-
- # Skip if project is ignored
- if ft_project["custom_attributes"].get(
- self.ignore_custom_attr_key
- ) is True:
- msg = (
- "Project \"{}\" has set `Ignore Sync` custom attribute to True"
- ).format(project_full_name)
- self.log.warning(msg)
- return {"success": False, "message": msg}
-
- # Check if `avalon_mongo_id` custom attribute exist or is accessible
- if self.id_cust_attr not in ft_project["custom_attributes"]:
- items = []
- items.append({
- "type": "label",
- "value": "# Can't access Custom attribute <{}>".format(
- self.id_cust_attr
- )
- })
-            items.append({
-                "type": "label",
-                "value": (
-                    "- Check if user \"{}\" has permissions"
-                    " to access the Custom attribute"
-                ).format(_session.api_key)
-            })
-            items.append({
-                "type": "label",
-                "value": "- Check if the Custom attribute exist"
-            })
- return {
- "items": items,
- "title": "Synchronization failed",
- "success": False,
- "message": "Synchronization failed"
- }
-
- # Find all entities in project
- all_project_entities = self.session.query(
- self.entities_query.format(ft_project_id)
- ).all()
-
- # Store entities by `id` and `parent_id`
- entities_dict = collections.defaultdict(lambda: {
- "children": list(),
- "parent_id": None,
- "entity": None,
- "entity_type": None,
- "name": None,
- "custom_attributes": {},
- "hier_attrs": {},
- "avalon_attrs": {},
- "tasks": []
- })
-
- for entity in all_project_entities:
- parent_id = entity["parent_id"]
- entity_type = entity.entity_type
- entity_type_low = entity_type.lower()
- if entity_type_low == "task":
- entities_dict[parent_id]["tasks"].append(entity["name"])
- continue
-
- entity_id = entity["id"]
- entities_dict[entity_id].update({
- "entity": entity,
- "parent_id": parent_id,
- "entity_type": entity_type_low,
- "entity_type_orig": entity_type,
- "name": entity["name"]
- })
- entities_dict[parent_id]["children"].append(entity_id)
-
- entities_dict[ft_project_id]["entity"] = ft_project
- entities_dict[ft_project_id]["entity_type"] = (
- ft_project.entity_type.lower()
- )
- entities_dict[ft_project_id]["entity_type_orig"] = (
- ft_project.entity_type
- )
- entities_dict[ft_project_id]["name"] = ft_project["full_name"]
-
- self.ft_project_id = ft_project_id
- self.entities_dict = entities_dict
-
- @property
- def avalon_ents_by_id(self):
- if self._avalon_ents_by_id is None:
- self._avalon_ents_by_id = {}
- for entity in self.avalon_entities:
- self._avalon_ents_by_id[str(entity["_id"])] = entity
-
- return self._avalon_ents_by_id
-
- @property
- def avalon_ents_by_ftrack_id(self):
- if self._avalon_ents_by_ftrack_id is None:
- self._avalon_ents_by_ftrack_id = {}
- for entity in self.avalon_entities:
- key = entity.get("data", {}).get("ftrackId")
- if not key:
- continue
- self._avalon_ents_by_ftrack_id[key] = str(entity["_id"])
-
- return self._avalon_ents_by_ftrack_id
-
- @property
- def avalon_ents_by_name(self):
- if self._avalon_ents_by_name is None:
- self._avalon_ents_by_name = {}
- for entity in self.avalon_entities:
- self._avalon_ents_by_name[entity["name"]] = str(entity["_id"])
-
- return self._avalon_ents_by_name
-
- @property
- def avalon_ents_by_parent_id(self):
- if self._avalon_ents_by_parent_id is None:
- self._avalon_ents_by_parent_id = collections.defaultdict(list)
- for entity in self.avalon_entities:
- parent_id = entity["data"]["visualParent"]
- if parent_id is not None:
- parent_id = str(parent_id)
- self._avalon_ents_by_parent_id[parent_id].append(entity)
-
- return self._avalon_ents_by_parent_id
-
- @property
- def avalon_archived_ents(self):
- if self._avalon_archived_ents is None:
- self._avalon_archived_ents = [
- ent for ent in self.dbcon.find({"type": "archived_asset"})
- ]
- return self._avalon_archived_ents
-
- @property
- def avalon_archived_by_name(self):
- if self._avalon_archived_by_name is None:
- self._avalon_archived_by_name = collections.defaultdict(list)
- for ent in self.avalon_archived_ents:
- self._avalon_archived_by_name[ent["name"]].append(ent)
- return self._avalon_archived_by_name
-
- @property
- def avalon_archived_by_id(self):
- if self._avalon_archived_by_id is None:
- self._avalon_archived_by_id = {
- str(ent["_id"]): ent for ent in self.avalon_archived_ents
- }
- return self._avalon_archived_by_id
-
- @property
- def avalon_archived_by_parent_id(self):
- if self._avalon_archived_by_parent_id is None:
- self._avalon_archived_by_parent_id = collections.defaultdict(list)
- for entity in self.avalon_archived_ents:
- parent_id = entity["data"]["visualParent"]
- if parent_id is not None:
- parent_id = str(parent_id)
- self._avalon_archived_by_parent_id[parent_id].append(entity)
-
- return self._avalon_archived_by_parent_id
-
- @property
- def subsets_by_parent_id(self):
- if self._subsets_by_parent_id is None:
- self._subsets_by_parent_id = collections.defaultdict(list)
- for subset in self.dbcon.find({"type": "subset"}):
- self._subsets_by_parent_id[str(subset["parent"])].append(
- subset
- )
-
- return self._subsets_by_parent_id
-
- @property
- def changeability_by_mongo_id(self):
- if self._changeability_by_mongo_id is None:
- self._changeability_by_mongo_id = collections.defaultdict(
- lambda: True
- )
- self._changeability_by_mongo_id[self.avalon_project_id] = False
- self._bubble_changeability(list(self.subsets_by_parent_id.keys()))
- return self._changeability_by_mongo_id
-
- @property
- def all_ftrack_names(self):
- return [
- ent_dict["name"] for ent_dict in self.entities_dict.values() if (
- ent_dict.get("name")
- )
- ]
-
- def duplicity_regex_check(self):
- self.log.debug("* Checking duplicities and invalid symbols")
- # Duplicity and regex check
- entity_ids_by_name = {}
- duplicates = []
- failed_regex = []
- task_names = {}
- for ftrack_id, entity_dict in self.entities_dict.items():
- regex_check = True
- name = entity_dict["name"]
- entity_type = entity_dict["entity_type"]
- # Tasks must be checked too
- for task_name in entity_dict["tasks"]:
- passed = task_names.get(task_name)
- if passed is None:
- passed = self.check_regex(task_name, "task")
- task_names[task_name] = passed
-
- if not passed:
- self.tasks_failed_regex[task_name].append(ftrack_id)
-
- if name in entity_ids_by_name:
- duplicates.append(name)
- else:
- entity_ids_by_name[name] = []
- regex_check = self.check_regex(name, entity_type)
-
- entity_ids_by_name[name].append(ftrack_id)
- if not regex_check:
- failed_regex.append(name)
-
- for name in failed_regex:
- self.failed_regex[name] = entity_ids_by_name[name]
-
- for name in duplicates:
- self.duplicates[name] = entity_ids_by_name[name]
-
- self.filter_by_duplicate_regex()
-
- def check_regex(self, name, entity_type, in_schema=None):
- schema_name = "asset-3.0"
- if in_schema:
- schema_name = in_schema
- elif entity_type == "project":
- schema_name = "project-2.0"
- elif entity_type == "task":
- schema_name = "task"
-
- name_pattern = self.schema_patterns.get(schema_name)
- if not name_pattern:
- default_pattern = "^[a-zA-Z0-9_.]*$"
- schema_obj = avalon.schema._cache.get(schema_name + ".json")
- if not schema_obj:
- name_pattern = default_pattern
- else:
- name_pattern = schema_obj.get(
- "properties", {}).get(
- "name", {}).get(
- "pattern", default_pattern
- )
- self.schema_patterns[schema_name] = name_pattern
-
- if re.match(name_pattern, name):
- return True
- return False
-
- def filter_by_duplicate_regex(self):
- filter_queue = queue.Queue()
- failed_regex_msg = "{} - Entity has invalid symbols in the name"
- duplicate_msg = "There are multiple entities with the name: \"{}\":"
-
- for ids in self.failed_regex.values():
- for id in ids:
- ent_path = self.get_ent_path(id)
- self.log.warning(failed_regex_msg.format(ent_path))
- filter_queue.put(id)
-
- for name, ids in self.duplicates.items():
- self.log.warning(duplicate_msg.format(name))
- for id in ids:
- ent_path = self.get_ent_path(id)
- self.log.warning(ent_path)
- filter_queue.put(id)
-
- filtered_ids = []
- while not filter_queue.empty():
- ftrack_id = filter_queue.get()
- if ftrack_id in filtered_ids:
- continue
-
- entity_dict = self.entities_dict.pop(ftrack_id, {})
- if not entity_dict:
- continue
-
- self.all_filtered_entities[ftrack_id] = entity_dict
- parent_id = entity_dict.get("parent_id")
- if parent_id and parent_id in self.entities_dict:
- if ftrack_id in self.entities_dict[parent_id]["children"]:
- self.entities_dict[parent_id]["children"].remove(ftrack_id)
-
- filtered_ids.append(ftrack_id)
- for child_id in entity_dict.get("children", []):
- filter_queue.put(child_id)
-
- # self.all_filtered_ids.extend(filtered_ids)
-
- for name, ids in self.tasks_failed_regex.items():
- for id in ids:
- if id not in self.entities_dict:
- continue
- self.entities_dict[id]["tasks"].remove(name)
- ent_path = self.get_ent_path(id)
- self.log.warning(failed_regex_msg.format(
- "/".join([ent_path, name])
- ))
-
- def filter_by_ignore_sync(self):
- # skip filtering if `ignore_sync` attribute do not exist
- if self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
- self.ignore_custom_attr_key, "_notset_"
- ) == "_notset_":
- return
-
- self.filter_queue = queue.Queue()
- self.filter_queue.put((self.ft_project_id, False))
- while not self.filter_queue.empty():
- parent_id, remove = self.filter_queue.get()
- if remove:
- parent_dict = self.entities_dict.pop(parent_id, {})
- self.all_filtered_entities[parent_id] = parent_dict
- self.filtered_ids.append(parent_id)
- else:
- parent_dict = self.entities_dict.get(parent_id, {})
-
- for child_id in parent_dict.get("children", []):
- # keep original `remove` value for all childs
- _remove = (remove is True)
- if not _remove:
- if self.entities_dict[child_id]["avalon_attrs"].get(
- self.ignore_custom_attr_key
- ):
- self.entities_dict[parent_id]["children"].remove(
- child_id
- )
- _remove = True
- self.filter_queue.put((child_id, _remove))
-
- # self.all_filtered_ids.extend(self.filtered_ids)
-
- def filter_by_selection(self, event):
- # BUGGY!!!! cause that entities are in deleted list
- # TODO may be working when filtering happen after preparations
- # - But this part probably does not have any functional reason
- # - Time of synchronization probably won't be changed much
- selected_ids = []
- for entity in event["data"]["selection"]:
- # Skip if project is in selection
- if entity["entityType"] == "show":
- return
- selected_ids.append(entity["entityId"])
-
- sync_ids = [self.ft_project_id]
- parents_queue = queue.Queue()
- children_queue = queue.Queue()
- for id in selected_ids:
- # skip if already filtered with ignore sync custom attribute
- if id in self.filtered_ids:
- continue
-
- parents_queue.put(id)
- children_queue.put(id)
-
- while not parents_queue.empty():
- id = parents_queue.get()
- while True:
- # Stops when parent is in sync_ids
- if id in self.filtered_ids or id in sync_ids or id is None:
- break
- sync_ids.append(id)
- id = self.entities_dict[id]["parent_id"]
-
- while not children_queue.empty():
- parent_id = children_queue.get()
- for child_id in self.entities_dict[parent_id]["children"]:
- if child_id in sync_ids or child_id in self.filtered_ids:
- continue
- sync_ids.append(child_id)
- children_queue.put(child_id)
-
- # separate not selected and to process entities
- for key, value in self.entities_dict.items():
- if key not in sync_ids:
- self.not_selected_ids.append(key)
-
- for id in self.not_selected_ids:
- # pop from entities
- value = self.entities_dict.pop(id)
- # remove entity from parent's children
- parent_id = value["parent_id"]
- if parent_id not in sync_ids:
- continue
-
- self.entities_dict[parent_id]["children"].remove(id)
-
- def set_cutom_attributes(self):
- self.log.debug("* Preparing custom attributes")
- # Get custom attributes and values
- custom_attrs, hier_attrs = self.get_avalon_attr(True)
- ent_types = self.session.query("select id, name from ObjectType").all()
- ent_types_by_name = {
- ent_type["name"]: ent_type["id"] for ent_type in ent_types
- }
-
- attrs = set()
- # store default values per entity type
- attrs_per_entity_type = collections.defaultdict(dict)
- avalon_attrs = collections.defaultdict(dict)
- # store also custom attribute configuration id for future use (create)
- attrs_per_entity_type_ca_id = collections.defaultdict(dict)
- avalon_attrs_ca_id = collections.defaultdict(dict)
-
- for cust_attr in custom_attrs:
- key = cust_attr["key"]
- attrs.add(key)
- ca_ent_type = cust_attr["entity_type"]
- if key.startswith("avalon_"):
- if ca_ent_type == "show":
- avalon_attrs[ca_ent_type][key] = cust_attr["default"]
- avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
- else:
- obj_id = cust_attr["object_type_id"]
- avalon_attrs[obj_id][key] = cust_attr["default"]
- avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
- continue
-
- if ca_ent_type == "show":
- attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
- attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
- else:
- obj_id = cust_attr["object_type_id"]
- attrs_per_entity_type[obj_id][key] = cust_attr["default"]
- attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]
-
- obj_id_ent_type_map = {}
- sync_ids = []
- for entity_id, entity_dict in self.entities_dict.items():
- sync_ids.append(entity_id)
- entity_type = entity_dict["entity_type"]
- entity_type_orig = entity_dict["entity_type_orig"]
-
- if entity_type == "project":
- attr_key = "show"
- else:
- map_key = obj_id_ent_type_map.get(entity_type_orig)
- if not map_key:
- # Put space between capitals
- # (e.g. 'AssetBuild' -> 'Asset Build')
- map_key = re.sub(
- r"(\w)([A-Z])", r"\1 \2", entity_type_orig
- )
- obj_id_ent_type_map[entity_type_orig] = map_key
-
- # Get object id of entity type
- attr_key = ent_types_by_name.get(map_key)
-
- # Backup soluction when id is not found by prequeried objects
- if not attr_key:
- query = "ObjectType where name is \"{}\"".format(map_key)
- attr_key = self.session.query(query).one()["id"]
- ent_types_by_name[map_key] = attr_key
-
- prepared_attrs = attrs_per_entity_type.get(attr_key)
- prepared_avalon_attr = avalon_attrs.get(attr_key)
- prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key)
- prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key)
- if prepared_attrs:
- self.entities_dict[entity_id]["custom_attributes"] = (
- prepared_attrs.copy()
- )
- if prepared_attrs_ca_id:
- self.entities_dict[entity_id]["custom_attributes_id"] = (
- prepared_attrs_ca_id.copy()
- )
- if prepared_avalon_attr:
- self.entities_dict[entity_id]["avalon_attrs"] = (
- prepared_avalon_attr.copy()
- )
- if prepared_avalon_attr_ca_id:
- self.entities_dict[entity_id]["avalon_attrs_id"] = (
- prepared_avalon_attr_ca_id.copy()
- )
-
- # TODO query custom attributes by entity_id
- entity_ids_joined = ", ".join([
- "\"{}\"".format(id) for id in sync_ids
- ])
- attributes_joined = ", ".join([
- "\"{}\"".format(name) for name in attrs
- ])
-
- cust_attr_query = (
- "select value, entity_id from ContextCustomAttributeValue "
- "where entity_id in ({}) and configuration.key in ({})"
- )
- call_expr = [{
- "action": "query",
- "expression": cust_attr_query.format(
- entity_ids_joined, attributes_joined
- )
- }]
- if hasattr(self.session, "_call"):
- [values] = self.session._call(call_expr)
- else:
- [values] = self.session.call(call_expr)
-
- for value in values["data"]:
- entity_id = value["entity_id"]
- key = value["configuration"]["key"]
- store_key = "custom_attributes"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
- self.entities_dict[entity_id][store_key][key] = value["value"]
-
- # process hierarchical attributes
- self.set_hierarchical_attribute(hier_attrs, sync_ids)
-
- def set_hierarchical_attribute(self, hier_attrs, sync_ids):
- # collect all hierarchical attribute keys
- # and prepare default values to project
- attribute_names = []
- for attr in hier_attrs:
- key = attr["key"]
- attribute_names.append(key)
-
- store_key = "hier_attrs"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
-
- self.entities_dict[self.ft_project_id][store_key][key] = (
- attr["default"]
- )
-
- # Prepare dict with all hier keys and None values
- prepare_dict = {}
- prepare_dict_avalon = {}
- for attr in attribute_names:
- if attr.startswith("avalon_"):
- prepare_dict_avalon[attr] = None
- else:
- prepare_dict[attr] = None
-
- for id, entity_dict in self.entities_dict.items():
- # Skip project because has stored defaults at the moment
- if entity_dict["entity_type"] == "project":
- continue
- entity_dict["hier_attrs"] = prepare_dict.copy()
- for key, val in prepare_dict_avalon.items():
- entity_dict["avalon_attrs"][key] = val
-
- # Prepare values to query
- entity_ids_joined = ", ".join([
- "\"{}\"".format(id) for id in sync_ids
- ])
- attributes_joined = ", ".join([
- "\"{}\"".format(name) for name in attribute_names
- ])
- call_expr = [{
- "action": "query",
- "expression": (
- "select value, entity_id from ContextCustomAttributeValue "
- "where entity_id in ({}) and configuration.key in ({})"
- ).format(entity_ids_joined, attributes_joined)
- }]
- if hasattr(self.session, "_call"):
- [values] = self.session._call(call_expr)
- else:
- [values] = self.session.call(call_expr)
-
- avalon_hier = []
- for value in values["data"]:
- if value["value"] is None:
- continue
- entity_id = value["entity_id"]
- key = value["configuration"]["key"]
- store_key = "hier_attrs"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
- avalon_hier.append(key)
- self.entities_dict[entity_id][store_key][key] = value["value"]
-
- # Get dictionary with not None hierarchical values to pull to childs
- top_id = self.ft_project_id
- project_values = {}
- for key, value in self.entities_dict[top_id]["hier_attrs"].items():
- if value is not None:
- project_values[key] = value
-
- for key in avalon_hier:
- value = self.entities_dict[top_id]["avalon_attrs"][key]
- if value is not None:
- project_values[key] = value
-
- hier_down_queue = queue.Queue()
- hier_down_queue.put((project_values, top_id))
-
- while not hier_down_queue.empty():
- hier_values, parent_id = hier_down_queue.get()
- for child_id in self.entities_dict[parent_id]["children"]:
- _hier_values = hier_values.copy()
- for name in attribute_names:
- store_key = "hier_attrs"
- if name.startswith("avalon_"):
- store_key = "avalon_attrs"
- value = self.entities_dict[child_id][store_key][name]
- if value is not None:
- _hier_values[name] = value
-
- self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
- hier_down_queue.put((_hier_values, child_id))
-
- def remove_from_archived(self, mongo_id):
- entity = self.avalon_archived_by_id.pop(mongo_id, None)
- if not entity:
- return
-
- if self._avalon_archived_ents is not None:
- if entity in self._avalon_archived_ents:
- self._avalon_archived_ents.remove(entity)
-
- if self._avalon_archived_by_name is not None:
- name = entity["name"]
- if name in self._avalon_archived_by_name:
- name_ents = self._avalon_archived_by_name[name]
- if entity in name_ents:
- if len(name_ents) == 1:
- self._avalon_archived_by_name.pop(name)
- else:
- self._avalon_archived_by_name[name].remove(entity)
-
- # TODO use custom None instead of __NOTSET__
- if self._avalon_archived_by_parent_id is not None:
- parent_id = entity.get("data", {}).get(
- "visualParent", "__NOTSET__"
- )
- if parent_id is not None:
- parent_id = str(parent_id)
-
- if parent_id in self._avalon_archived_by_parent_id:
- parent_list = self._avalon_archived_by_parent_id[parent_id]
- if entity not in parent_list:
- self._avalon_archived_by_parent_id[parent_id].remove(
- entity
- )
-
- def prepare_ftrack_ent_data(self):
- not_set_ids = []
- for id, entity_dict in self.entities_dict.items():
- entity = entity_dict["entity"]
- if entity is None:
- not_set_ids.append(id)
- continue
-
- self.entities_dict[id]["final_entity"] = {}
- self.entities_dict[id]["final_entity"]["name"] = (
- entity_dict["name"]
- )
- data = {}
- data["ftrackId"] = entity["id"]
- data["entityType"] = entity_dict["entity_type_orig"]
-
- for key, val in entity_dict.get("custom_attributes", []).items():
- data[key] = val
-
- for key, val in entity_dict.get("hier_attrs", []).items():
- data[key] = val
-
- if id == self.ft_project_id:
- data["code"] = entity["name"]
- self.entities_dict[id]["final_entity"]["data"] = data
- self.entities_dict[id]["final_entity"]["type"] = "project"
-
- proj_schema = entity["project_schema"]
- task_types = proj_schema["_task_type_schema"]["types"]
- self.entities_dict[id]["final_entity"]["config"] = {
- "tasks": [{"name": tt["name"]} for tt in task_types],
- "apps": self.get_project_apps(data)
- }
- continue
-
- ent_path_items = [ent["name"] for ent in entity["link"]]
- parents = ent_path_items[1:len(ent_path_items)-1:]
- hierarchy = ""
- if len(parents) > 0:
- hierarchy = os.path.sep.join(parents)
-
- data["parents"] = parents
- data["hierarchy"] = hierarchy
- data["tasks"] = self.entities_dict[id].pop("tasks", [])
- self.entities_dict[id]["final_entity"]["data"] = data
- self.entities_dict[id]["final_entity"]["type"] = "asset"
-
- if not_set_ids:
- self.log.debug((
- "- Debug information: Filtering bug, there are empty dicts"
- "in entities dict (functionality should not be affected) <{}>"
- ).format("| ".join(not_set_ids)))
- for id in not_set_ids:
- self.entities_dict.pop(id)
-
- def get_project_apps(self, proj_data):
- apps = []
- missing_toml_msg = "Missing config file for application"
- error_msg = (
- "Unexpected error happend during preparation of application"
- )
- for app in proj_data.get("applications"):
- try:
- toml_path = avalon.lib.which_app(app)
- # TODO report
- if not toml_path:
- self.log.warning(missing_toml_msg + '"{}"'.format(app))
- self.report_items["warning"][missing_toml_msg].append(app)
- continue
-
- apps.append({
- "name": app,
- "label": toml.load(toml_path)["label"]
- })
- except Exception:
- # TODO report
- self.report_items["warning"][error_msg].append(app)
- self.log.warning((
- "Error has happened during preparing application \"{}\""
- ).format(app), exc_info=True)
- return apps
-
- def get_ent_path(self, ftrack_id):
- ent_path = self._ent_pats_by_ftrack_id.get(ftrack_id)
- if not ent_path:
- entity = self.entities_dict[ftrack_id]["entity"]
- ent_path = "/".join(
- [ent["name"] for ent in entity["link"]]
- )
- self._ent_pats_by_ftrack_id[ftrack_id] = ent_path
-
- return ent_path
-
- def prepare_avalon_entities(self, ft_project_name):
- self.log.debug((
- "* Preparing avalon entities "
- "(separate to Create, Update and Deleted groups)"
- ))
- # Avalon entities
- self.dbcon.install()
- self.dbcon.Session["AVALON_PROJECT"] = ft_project_name
- avalon_project = self.dbcon.find_one({"type": "project"})
- avalon_entities = self.dbcon.find({"type": "asset"})
- self.avalon_project = avalon_project
- self.avalon_entities = avalon_entities
-
- ftrack_avalon_mapper = {}
- avalon_ftrack_mapper = {}
- create_ftrack_ids = []
- update_ftrack_ids = []
-
- same_mongo_id = []
- all_mongo_ids = {}
- for ftrack_id, entity_dict in self.entities_dict.items():
- mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
- if not mongo_id:
- continue
- if mongo_id in all_mongo_ids:
- same_mongo_id.append(mongo_id)
- else:
- all_mongo_ids[mongo_id] = []
- all_mongo_ids[mongo_id].append(ftrack_id)
-
- if avalon_project:
- mongo_id = str(avalon_project["_id"])
- ftrack_avalon_mapper[self.ft_project_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = self.ft_project_id
- update_ftrack_ids.append(self.ft_project_id)
- else:
- create_ftrack_ids.append(self.ft_project_id)
-
- # make it go hierarchically
- prepare_queue = queue.Queue()
-
- for child_id in self.entities_dict[self.ft_project_id]["children"]:
- prepare_queue.put(child_id)
-
- while not prepare_queue.empty():
- ftrack_id = prepare_queue.get()
- for child_id in self.entities_dict[ftrack_id]["children"]:
- prepare_queue.put(child_id)
-
- entity_dict = self.entities_dict[ftrack_id]
- ent_path = self.get_ent_path(ftrack_id)
-
- mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
- av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
- if av_ent_by_mongo_id:
- av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
- "ftrackId"
- )
- is_right = False
- else_match_better = False
- if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id:
- is_right = True
-
- elif mongo_id not in same_mongo_id:
- is_right = True
-
- else:
- ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id]
- for _ftrack_id in ftrack_ids_with_same_mongo:
- if _ftrack_id == av_ent_ftrack_id:
- continue
-
- _entity_dict = self.entities_dict[_ftrack_id]
- _mongo_id = _entity_dict["avalon_attrs"][
- self.id_cust_attr
- ]
- _av_ent_by_mongo_id = self.avalon_ents_by_id.get(
- _mongo_id
- )
- _av_ent_ftrack_id = _av_ent_by_mongo_id.get(
- "data", {}
- ).get("ftrackId")
- if _av_ent_ftrack_id == ftrack_id:
- else_match_better = True
- break
-
- if not is_right and not else_match_better:
- entity = entity_dict["entity"]
- ent_path_items = [ent["name"] for ent in entity["link"]]
- parents = ent_path_items[1:len(ent_path_items)-1:]
- av_parents = av_ent_by_mongo_id["data"]["parents"]
- if av_parents == parents:
- is_right = True
- else:
- name = entity_dict["name"]
- av_name = av_ent_by_mongo_id["name"]
- if name == av_name:
- is_right = True
-
- if is_right:
- self.log.debug(
- "Existing (by MongoID) <{}>".format(ent_path)
- )
- ftrack_avalon_mapper[ftrack_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = ftrack_id
- update_ftrack_ids.append(ftrack_id)
- continue
-
- mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
- if not mongo_id:
- mongo_id = self.avalon_ents_by_name.get(entity_dict["name"])
- if mongo_id:
- self.log.debug(
- "Existing (by matching name) <{}>".format(ent_path)
- )
- else:
- self.log.debug(
- "Existing (by FtrackID in mongo) <{}>".format(ent_path)
- )
-
- if mongo_id:
- ftrack_avalon_mapper[ftrack_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = ftrack_id
- update_ftrack_ids.append(ftrack_id)
- continue
-
- self.log.debug("New <{}>".format(ent_path))
- create_ftrack_ids.append(ftrack_id)
-
- deleted_entities = []
- for mongo_id in self.avalon_ents_by_id:
- if mongo_id in avalon_ftrack_mapper:
- continue
- deleted_entities.append(mongo_id)
-
- av_ent = self.avalon_ents_by_id[mongo_id]
- av_ent_path_items = [p for p in av_ent["data"]["parents"]]
- av_ent_path_items.append(av_ent["name"])
- self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
-
- self.ftrack_avalon_mapper = ftrack_avalon_mapper
- self.avalon_ftrack_mapper = avalon_ftrack_mapper
- self.create_ftrack_ids = create_ftrack_ids
- self.update_ftrack_ids = update_ftrack_ids
- self.deleted_entities = deleted_entities
-
- self.log.debug((
- "Ftrack -> Avalon comparison: New <{}> "
- "| Existing <{}> | Deleted <{}>"
- ).format(
- len(create_ftrack_ids),
- len(update_ftrack_ids),
- len(deleted_entities)
- ))
-
- def filter_with_children(self, ftrack_id):
- if ftrack_id not in self.entities_dict:
- return
- ent_dict = self.entities_dict[ftrack_id]
- parent_id = ent_dict["parent_id"]
- self.entities_dict[parent_id]["children"].remove(ftrack_id)
-
- children_queue = queue.Queue()
- children_queue.put(ftrack_id)
- while not children_queue.empty():
- _ftrack_id = children_queue.get()
- entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
- for child_id in entity_dict["children"]:
- children_queue.put(child_id)
-
- def prepare_changes(self):
- self.log.debug("* Preparing changes for avalon/ftrack")
- hierarchy_changing_ids = []
- ignore_keys = collections.defaultdict(list)
-
- update_queue = queue.Queue()
- for ftrack_id in self.update_ftrack_ids:
- update_queue.put(ftrack_id)
-
- while not update_queue.empty():
- ftrack_id = update_queue.get()
- if ftrack_id == self.ft_project_id:
- changes = self.prepare_project_changes()
- if changes:
- self.updates[self.avalon_project_id] = changes
- continue
-
- ftrack_ent_dict = self.entities_dict[ftrack_id]
-
- # *** check parents
- parent_check = False
-
- ftrack_parent_id = ftrack_ent_dict["parent_id"]
- avalon_id = self.ftrack_avalon_mapper[ftrack_id]
- avalon_entity = self.avalon_ents_by_id[avalon_id]
- avalon_parent_id = avalon_entity["data"]["visualParent"]
- if avalon_parent_id is not None:
- avalon_parent_id = str(avalon_parent_id)
-
- ftrack_parent_mongo_id = self.ftrack_avalon_mapper[
- ftrack_parent_id
- ]
-
- # if parent is project
- if (ftrack_parent_mongo_id == avalon_parent_id) or (
- ftrack_parent_id == self.ft_project_id and
- avalon_parent_id is None
- ):
- parent_check = True
-
- # check name
- ftrack_name = ftrack_ent_dict["name"]
- avalon_name = avalon_entity["name"]
- name_check = ftrack_name == avalon_name
-
- # IDEAL STATE: both parent and name check passed
- if parent_check and name_check:
- continue
-
- # If entity is changeable then change values of parent or name
- if self.changeability_by_mongo_id[avalon_id]:
- # TODO logging
- if not parent_check:
- if ftrack_parent_mongo_id == str(self.avalon_project_id):
- new_parent_name = self.entities_dict[
- self.ft_project_id]["name"]
- new_parent_id = None
- else:
- new_parent_name = self.avalon_ents_by_id[
- ftrack_parent_mongo_id]["name"]
- new_parent_id = ObjectId(ftrack_parent_mongo_id)
-
- if avalon_parent_id == str(self.avalon_project_id):
- old_parent_name = self.entities_dict[
- self.ft_project_id]["name"]
- else:
- old_parent_name = self.avalon_ents_by_id[
- ftrack_parent_mongo_id]["name"]
-
- self.updates[avalon_id]["data"] = {
- "visualParent": new_parent_id
- }
- ignore_keys[ftrack_id].append("data.visualParent")
- self.log.debug((
- "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\""
- ).format(avalon_name, old_parent_name, new_parent_name))
-
- if not name_check:
- self.updates[avalon_id]["name"] = ftrack_name
- ignore_keys[ftrack_id].append("name")
- self.log.debug(
- "Avalon entity \"{}\" was renamed to \"{}\"".format(
- avalon_name, ftrack_name
- )
- )
- continue
-
- # parents and hierarchy must be recalculated
- hierarchy_changing_ids.append(ftrack_id)
-
- # Parent is project if avalon_parent_id is set to None
- if avalon_parent_id is None:
- avalon_parent_id = str(self.avalon_project_id)
-
- if not name_check:
- ent_path = self.get_ent_path(ftrack_id)
- # TODO report
- # TODO logging
- self.entities_dict[ftrack_id]["name"] = avalon_name
- self.entities_dict[ftrack_id]["entity"]["name"] = (
- avalon_name
- )
- self.entities_dict[ftrack_id]["final_entity"]["name"] = (
- avalon_name
- )
- self.log.warning("Name was changed back to {} <{}>".format(
- avalon_name, ent_path
- ))
- self._ent_pats_by_ftrack_id.pop(ftrack_id, None)
- msg = (
- " It is not possible to change"
- " the name of an entity or it's parents, "
- " if it already contained published data."
- )
- self.report_items["warning"][msg].append(ent_path)
-
- # skip parent oricessing if hierarchy didn't change
- if parent_check:
- continue
-
- # Logic when parenting(hierarchy) has changed and should not
- old_ftrack_parent_id = self.avalon_ftrack_mapper.get(
- avalon_parent_id
- )
-
- # If last ftrack parent id from mongo entity exist then just
- # remap paren_id on entity
- if old_ftrack_parent_id:
- # TODO report
- # TODO logging
- ent_path = self.get_ent_path(ftrack_id)
- msg = (
- " It is not possible"
- " to change the hierarchy of an entity or it's parents,"
- " if it already contained published data."
- )
- self.report_items["warning"][msg].append(ent_path)
- self.log.warning((
- " Entity contains published data so it was moved"
- " back to it's original hierarchy <{}>"
- ).format(ent_path))
- self.entities_dict[ftrack_id]["entity"]["parent_id"] = (
- old_ftrack_parent_id
- )
- self.entities_dict[ftrack_id]["parent_id"] = (
- old_ftrack_parent_id
- )
- self.entities_dict[old_ftrack_parent_id][
- "children"
- ].append(ftrack_id)
-
- continue
-
- old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id)
- if not old_parent_ent:
- old_parent_ent = self.avalon_archived_by_id.get(
- avalon_parent_id
- )
-
- # TODO report
- # TODO logging
- if not old_parent_ent:
- self.log.warning((
- "Parent entity was not found by id"
- " - Trying to find by parent name"
- ))
- ent_path = self.get_ent_path(ftrack_id)
-
- parents = avalon_entity["data"]["parents"]
- parent_name = parents[-1]
- matching_entity_id = None
- for id, entity_dict in self.entities_dict.items():
- if entity_dict["name"] == parent_name:
- matching_entity_id = id
- break
-
- if matching_entity_id is None:
- # TODO logging
- # TODO report (turn off auto-sync?)
- self.log.error((
- "The entity contains published data but it was moved to"
- " a different place in the hierarchy and it's previous"
- " parent cannot be found."
- " It's impossible to solve this programmatically <{}>"
- ).format(ent_path))
- msg = (
- " Hierarchy of an entity" " can't be changed due to published data and missing"
- " previous parent"
- )
- self.report_items["error"][msg].append(ent_path)
- self.filter_with_children(ftrack_id)
- continue
-
- matching_ent_dict = self.entities_dict.get(matching_entity_id)
- match_ent_parents = matching_ent_dict.get(
- "final_entity", {}).get(
- "data", {}).get(
- "parents", ["__NOT_SET__"]
- )
- # TODO logging
- # TODO report
- if (
- len(match_ent_parents) >= len(parents) or
- match_ent_parents[:-1] != parents
- ):
- ent_path = self.get_ent_path(ftrack_id)
- self.log.error((
- "The entity contains published data but it was moved to"
- " a different place in the hierarchy and it's previous"
- " parents were moved too."
- " It's impossible to solve this programmatically <{}>"
- ).format(ent_path))
- msg = (
- " Hierarchy of an entity"
- " can't be changed due to published data and scrambled"
- "hierarchy"
- )
- continue
-
- old_parent_ent = matching_ent_dict["final_entity"]
-
- parent_id = self.ft_project_id
- entities_to_create = []
- # TODO logging
- self.log.warning(
- "Ftrack entities must be recreated because they were deleted,"
- " but they contain published data."
- )
-
- _avalon_ent = old_parent_ent
-
- self.updates[avalon_parent_id] = {"type": "asset"}
- success = True
- while True:
- _vis_par = _avalon_ent["data"]["visualParent"]
- _name = _avalon_ent["name"]
- if _name in self.all_ftrack_names:
- av_ent_path_items = _avalon_ent["data"]["parents"]
- av_ent_path_items.append(_name)
- av_ent_path = "/".join(av_ent_path_items)
- # TODO report
- # TODO logging
- self.log.error((
- "Can't recreate the entity in Ftrack because an entity" " with the same name already exists in a different"
- " place in the hierarchy <{}>"
- ).format(av_ent_path))
- msg = (
- " Hierarchy of an entity"
- " can't be changed. I contains published data and it's" " previous parent had a name, that is duplicated at a "
- " different hierarchy level"
- )
- self.report_items["error"][msg].append(av_ent_path)
- self.filter_with_children(ftrack_id)
- success = False
- break
-
- entities_to_create.append(_avalon_ent)
- if _vis_par is None:
- break
-
- _vis_par = str(_vis_par)
- _mapped = self.avalon_ftrack_mapper.get(_vis_par)
- if _mapped:
- parent_id = _mapped
- break
-
- _avalon_ent = self.avalon_ents_by_id.get(_vis_par)
- if not _avalon_ent:
- _avalon_ent = self.avalon_archived_by_id.get(_vis_par)
-
- if success is False:
- continue
-
- new_entity_id = None
- for av_entity in reversed(entities_to_create):
- new_entity_id = self.create_ftrack_ent_from_avalon_ent(
- av_entity, parent_id
- )
- update_queue.put(new_entity_id)
-
- if new_entity_id:
- ftrack_ent_dict["entity"]["parent_id"] = new_entity_id
-
- if hierarchy_changing_ids:
- self.reload_parents(hierarchy_changing_ids)
-
- for ftrack_id in self.update_ftrack_ids:
- if ftrack_id == self.ft_project_id:
- continue
-
- avalon_id = self.ftrack_avalon_mapper[ftrack_id]
- avalon_entity = self.avalon_ents_by_id[avalon_id]
-
- avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
- if (
- self.id_cust_attr not in avalon_attrs or
- avalon_attrs[self.id_cust_attr] != avalon_id
- ):
- configuration_id = self.entities_dict[ftrack_id][
- "avalon_attrs_id"][self.id_cust_attr]
-
- _entity_key = collections.OrderedDict({
- "configuration_id": configuration_id,
- "entity_id": ftrack_id
- })
-
- self.session.recorded_operations.push(
- fa_session.ftrack_api.operation.UpdateEntityOperation(
- "ContextCustomAttributeValue",
- _entity_key,
- "value",
- fa_session.ftrack_api.symbol.NOT_SET,
- avalon_id
- )
- )
- # check rest of data
- data_changes = self.compare_dict(
- self.entities_dict[ftrack_id]["final_entity"],
- avalon_entity,
- ignore_keys[ftrack_id]
- )
- if data_changes:
- self.updates[avalon_id] = self.merge_dicts(
- data_changes,
- self.updates[avalon_id]
- )
-
- def synchronize(self):
- self.log.debug("* Synchronization begins")
- avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
- if avalon_project_id:
- self.avalon_project_id = ObjectId(avalon_project_id)
-
- # remove filtered ftrack ids from create/update list
- for ftrack_id in self.all_filtered_entities:
- if ftrack_id in self.create_ftrack_ids:
- self.create_ftrack_ids.remove(ftrack_id)
- elif ftrack_id in self.update_ftrack_ids:
- self.update_ftrack_ids.remove(ftrack_id)
-
- self.log.debug("* Processing entities for archivation")
- self.delete_entities()
-
- self.log.debug("* Processing new entities")
- # Create not created entities
- for ftrack_id in self.create_ftrack_ids:
- # CHECK it is possible that entity was already created
- # because is parent of another entity which was processed first
- if ftrack_id in self.ftrack_avalon_mapper:
- continue
- self.create_avalon_entity(ftrack_id)
-
- if len(self.create_list) > 0:
- self.dbcon.insert_many(self.create_list)
-
- self.session.commit()
-
- self.log.debug("* Processing entities for update")
- self.prepare_changes()
- self.update_entities()
- self.session.commit()
-
- def create_avalon_entity(self, ftrack_id):
- if ftrack_id == self.ft_project_id:
- self.create_avalon_project()
- return
-
- entity_dict = self.entities_dict[ftrack_id]
- parent_ftrack_id = entity_dict["parent_id"]
- avalon_parent = None
- if parent_ftrack_id != self.ft_project_id:
- avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id)
- # if not avalon_parent:
- # self.create_avalon_entity(parent_ftrack_id)
- # avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id]
- avalon_parent = ObjectId(avalon_parent)
-
- # avalon_archived_by_id avalon_archived_by_name
- current_id = (
- entity_dict["avalon_attrs"].get(self.id_cust_attr) or ""
- ).strip()
- mongo_id = current_id
- name = entity_dict["name"]
-
- # Check if exist archived asset in mongo - by ID
- unarchive = False
- unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name)
- if unarchive_id is not None:
- unarchive = True
- mongo_id = unarchive_id
-
- item = entity_dict["final_entity"]
- try:
- new_id = ObjectId(mongo_id)
- if mongo_id in self.avalon_ftrack_mapper:
- new_id = ObjectId()
- except InvalidId:
- new_id = ObjectId()
-
- item["_id"] = new_id
- item["parent"] = self.avalon_project_id
- item["schema"] = self.entity_schemas["asset"]
- item["data"]["visualParent"] = avalon_parent
-
- new_id_str = str(new_id)
- self.ftrack_avalon_mapper[ftrack_id] = new_id_str
- self.avalon_ftrack_mapper[new_id_str] = ftrack_id
-
- self._avalon_ents_by_id[new_id_str] = item
- self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str
- self._avalon_ents_by_name[item["name"]] = new_id_str
-
- if current_id != new_id_str:
- # store mongo id to ftrack entity
- configuration_id = self.entities_dict[ftrack_id][
- "avalon_attrs_id"
- ][self.id_cust_attr]
- _entity_key = collections.OrderedDict({
- "configuration_id": configuration_id,
- "entity_id": ftrack_id
- })
-
- self.session.recorded_operations.push(
- fa_session.ftrack_api.operation.UpdateEntityOperation(
- "ContextCustomAttributeValue",
- _entity_key,
- "value",
- fa_session.ftrack_api.symbol.NOT_SET,
- new_id_str
- )
- )
-
- if unarchive is False:
- self.create_list.append(item)
- return
- # If unarchive then replace entity data in database
- self.dbcon.replace_one({"_id": new_id}, item)
- self.remove_from_archived(mongo_id)
- av_ent_path_items = item["data"]["parents"]
- av_ent_path_items.append(item["name"])
- av_ent_path = "/".join(av_ent_path_items)
- self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
-
- def check_unarchivation(self, ftrack_id, mongo_id, name):
- archived_by_id = self.avalon_archived_by_id.get(mongo_id)
- archived_by_name = self.avalon_archived_by_name.get(name)
-
- # if not found in archived then skip
- if not archived_by_id and not archived_by_name:
- return None
-
- entity_dict = self.entities_dict[ftrack_id]
-
- if archived_by_id:
- # if is changeable then unarchive (nothing to check here)
- if self.changeability_by_mongo_id[mongo_id]:
- return mongo_id
-
- # TODO replace `__NOTSET__` with custom None constant
- archived_parent_id = archived_by_id["data"].get(
- "visualParent", "__NOTSET__"
- )
- archived_parents = archived_by_id["data"].get("parents")
- archived_name = archived_by_id["name"]
-
- if (
- archived_name != entity_dict["name"] or
- archived_parents != entity_dict["final_entity"]["data"][
- "parents"
- ]
- ):
- return None
-
- return mongo_id
-
- # First check if there is any that have same parents
- for archived in archived_by_name:
- mongo_id = str(archived["_id"])
- archived_parents = archived.get("data", {}).get("parents")
- if (
- archived_parents == entity_dict["final_entity"]["data"][
- "parents"
- ]
- ):
- return mongo_id
-
- # Secondly try to find more close to current ftrack entity
- first_changeable = None
- for archived in archived_by_name:
- mongo_id = str(archived["_id"])
- if not self.changeability_by_mongo_id[mongo_id]:
- continue
-
- if first_changeable is None:
- first_changeable = mongo_id
-
- ftrack_parent_id = entity_dict["parent_id"]
- map_ftrack_parent_id = self.ftrack_avalon_mapper.get(
- ftrack_parent_id
- )
-
- # TODO replace `__NOTSET__` with custom None constant
- archived_parent_id = archived.get("data", {}).get(
- "visualParent", "__NOTSET__"
- )
- if archived_parent_id is not None:
- archived_parent_id = str(archived_parent_id)
-
- # skip if parent is archived - How this should be possible?
- parent_entity = self.avalon_ents_by_id.get(archived_parent_id)
- if (
- parent_entity and (
- map_ftrack_parent_id is not None and
- map_ftrack_parent_id == str(parent_entity["_id"])
- )
- ):
- return mongo_id
- # Last return first changeable with same name (or None)
- return first_changeable
-
- def create_avalon_project(self):
- project_item = self.entities_dict[self.ft_project_id]["final_entity"]
- mongo_id = (
- self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
- self.id_cust_attr
- ) or ""
- ).strip()
-
- try:
- new_id = ObjectId(mongo_id)
- except InvalidId:
- new_id = ObjectId()
-
- project_item["_id"] = new_id
- project_item["parent"] = None
- project_item["schema"] = self.entity_schemas["project"]
- project_item["config"]["schema"] = self.entity_schemas["config"]
- project_item["config"]["template"] = self.get_avalon_project_template()
-
- self.ftrack_avalon_mapper[self.ft_project_id] = new_id
- self.avalon_ftrack_mapper[new_id] = self.ft_project_id
-
- self.avalon_project_id = new_id
-
- self._avalon_ents_by_id[str(new_id)] = project_item
- self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
- self._avalon_ents_by_name[project_item["name"]] = str(new_id)
-
- self.create_list.append(project_item)
-
- # store mongo id to ftrack entity
- entity = self.entities_dict[self.ft_project_id]["entity"]
- entity["custom_attributes"][self.id_cust_attr] = str(new_id)
-
- def get_avalon_project_template(self):
- """Get avalon template
- Returns:
- dictionary with templates
- """
- project_name = self.entities_dict[self.ft_project_id]["name"]
- templates = Anatomy(project_name).templates
- return {
- "workfile": templates["avalon"]["workfile"],
- "work": templates["avalon"]["work"],
- "publish": templates["avalon"]["publish"]
- }
-
- def _bubble_changeability(self, unchangeable_ids):
- unchangeable_queue = queue.Queue()
- for entity_id in unchangeable_ids:
- unchangeable_queue.put((entity_id, False))
-
- processed_parents_ids = []
- subsets_to_remove = []
- while not unchangeable_queue.empty():
- entity_id, child_is_archived = unchangeable_queue.get()
- # skip if already processed
- if entity_id in processed_parents_ids:
- continue
-
- entity = self.avalon_ents_by_id.get(entity_id)
- # if entity is not archived but unchageable child was then skip
- # - archived entities should not affect not archived?
- if entity and child_is_archived:
- continue
-
- # set changeability of current entity to False
- self._changeability_by_mongo_id[entity_id] = False
- processed_parents_ids.append(entity_id)
- # if not entity then is probably archived
- if not entity:
- entity = self.avalon_archived_by_id.get(entity_id)
- child_is_archived = True
-
- if not entity:
- # if entity is not found then it is subset without parent
- if entity_id in unchangeable_ids:
- subsets_to_remove.append(entity_id)
- else:
- # TODO logging - What is happening here?
- self.log.warning((
- "Avalon contains entities without valid parents that"
- " lead to Project (should not cause errors)"
- " - MongoId <{}>"
- ).format(str(entity_id)))
- continue
-
- # skip if parent is project
- parent_id = entity["data"]["visualParent"]
- if parent_id is None:
- continue
- unchangeable_queue.put((str(parent_id), child_is_archived))
-
- self._delete_subsets_without_asset(subsets_to_remove)
-
- def _delete_subsets_without_asset(self, not_existing_parents):
- subset_ids = []
- version_ids = []
- repre_ids = []
- to_delete = []
-
- for parent_id in not_existing_parents:
- subsets = self.subsets_by_parent_id.get(parent_id)
- if not subsets:
- continue
- for subset in subsets:
- if subset.get("type") != "subset":
- continue
- subset_ids.append(subset["_id"])
-
- db_subsets = self.dbcon.find({
- "_id": {"$in": subset_ids},
- "type": "subset"
- })
- if not db_subsets:
- return
-
- db_versions = self.dbcon.find({
- "parent": {"$in": subset_ids},
- "type": "version"
- })
- if db_versions:
- version_ids = [ver["_id"] for ver in db_versions]
-
- db_repres = self.dbcon.find({
- "parent": {"$in": version_ids},
- "type": "representation"
- })
- if db_repres:
- repre_ids = [repre["_id"] for repre in db_repres]
-
- to_delete.extend(subset_ids)
- to_delete.extend(version_ids)
- to_delete.extend(repre_ids)
-
- self.dbcon.delete_many({"_id": {"$in": to_delete}})
-
- # Probably deprecated
- def _check_changeability(self, parent_id=None):
- for entity in self.avalon_ents_by_parent_id[parent_id]:
- mongo_id = str(entity["_id"])
- is_changeable = self._changeability_by_mongo_id.get(mongo_id)
- if is_changeable is not None:
- continue
-
- self._check_changeability(mongo_id)
- is_changeable = True
- for child in self.avalon_ents_by_parent_id[parent_id]:
- if not self._changeability_by_mongo_id[str(child["_id"])]:
- is_changeable = False
- break
-
- if is_changeable is True:
- is_changeable = (mongo_id in self.subsets_by_parent_id)
- self._changeability_by_mongo_id[mongo_id] = is_changeable
-
- def update_entities(self):
- mongo_changes_bulk = []
- for mongo_id, changes in self.updates.items():
- filter = {"_id": ObjectId(mongo_id)}
- change_data = self.from_dict_to_set(changes)
- mongo_changes_bulk.append(UpdateOne(filter, change_data))
-
- if not mongo_changes_bulk:
- # TODO LOG
- return
- self.dbcon.bulk_write(mongo_changes_bulk)
-
- def from_dict_to_set(self, data):
- result = {"$set": {}}
- dict_queue = queue.Queue()
- dict_queue.put((None, data))
-
- while not dict_queue.empty():
- _key, _data = dict_queue.get()
- for key, value in _data.items():
- new_key = key
- if _key is not None:
- new_key = "{}.{}".format(_key, key)
-
- if not isinstance(value, dict):
- result["$set"][new_key] = value
- continue
- dict_queue.put((new_key, value))
- return result
-
- def reload_parents(self, hierarchy_changing_ids):
- parents_queue = queue.Queue()
- parents_queue.put((self.ft_project_id, [], False))
- while not parents_queue.empty():
- ftrack_id, parent_parents, changed = parents_queue.get()
- _parents = parent_parents.copy()
- if ftrack_id not in hierarchy_changing_ids and not changed:
- if ftrack_id != self.ft_project_id:
- _parents.append(self.entities_dict[ftrack_id]["name"])
- for child_id in self.entities_dict[ftrack_id]["children"]:
- parents_queue.put((child_id, _parents, changed))
- continue
-
- changed = True
- parents = [par for par in _parents]
- hierarchy = "/".join(parents)
- self.entities_dict[ftrack_id][
- "final_entity"]["data"]["parents"] = parents
- self.entities_dict[ftrack_id][
- "final_entity"]["data"]["hierarchy"] = hierarchy
-
- _parents.append(self.entities_dict[ftrack_id]["name"])
- for child_id in self.entities_dict[ftrack_id]["children"]:
- parents_queue.put((child_id, _parents, changed))
-
- if ftrack_id in self.create_ftrack_ids:
- mongo_id = self.ftrack_avalon_mapper[ftrack_id]
- if "data" not in self.updates[mongo_id]:
- self.updates[mongo_id]["data"] = {}
- self.updates[mongo_id]["data"]["parents"] = parents
- self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
-
- def prepare_project_changes(self):
- ftrack_ent_dict = self.entities_dict[self.ft_project_id]
- ftrack_entity = ftrack_ent_dict["entity"]
- avalon_code = self.avalon_project["data"]["code"]
- # TODO Is possible to sync if full name was changed?
- # if ftrack_ent_dict["name"] != self.avalon_project["name"]:
- # ftrack_entity["full_name"] = avalon_name
- # self.entities_dict[self.ft_project_id]["name"] = avalon_name
- # self.entities_dict[self.ft_project_id]["final_entity"][
- # "name"
- # ] = avalon_name
-
- # TODO logging
- # TODO report
- # TODO May this happen? Is possible to change project code?
- if ftrack_entity["name"] != avalon_code:
- ftrack_entity["name"] = avalon_code
- self.entities_dict[self.ft_project_id]["final_entity"]["data"][
- "code"
- ] = avalon_code
- self.session.commit()
- sub_msg = (
- "Project code was changed back to \"{}\"".format(avalon_code)
- )
- msg = (
- "It is not possible to change"
- " project code after synchronization"
- )
- self.report_items["warning"][msg] = sub_msg
- self.log.warning(sub_msg)
-
- return self.compare_dict(
- self.entities_dict[self.ft_project_id]["final_entity"],
- self.avalon_project
- )
-
- def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
- # _ignore_keys may be used for keys nested dict like"data.visualParent"
- changes = {}
- ignore_keys = []
- for key_val in _ignore_keys:
- key_items = key_val.split(".")
- if len(key_items) == 1:
- ignore_keys.append(key_items[0])
-
- for key, value in dict_new.items():
- if key in ignore_keys:
- continue
-
- if key not in dict_old:
- changes[key] = value
- continue
-
- if isinstance(value, dict):
- if not isinstance(dict_old[key], dict):
- changes[key] = value
- continue
-
- _new_ignore_keys = []
- for key_val in _ignore_keys:
- key_items = key_val.split(".")
- if len(key_items) <= 1:
- continue
- _new_ignore_keys.append(".".join(key_items[1:]))
-
- _changes = self.compare_dict(
- value, dict_old[key], _new_ignore_keys
- )
- if _changes:
- changes[key] = _changes
- continue
-
- if value != dict_old[key]:
- changes[key] = value
-
- return changes
-
- def merge_dicts(self, dict_new, dict_old):
- for key, value in dict_new.items():
- if key not in dict_old:
- dict_old[key] = value
- continue
-
- if isinstance(value, dict):
- dict_old[key] = self.merge_dicts(value, dict_old[key])
- continue
-
- dict_old[key] = value
-
- return dict_old
-
- def delete_entities(self):
- if not self.deleted_entities:
- return
- # Try to order so child is not processed before parent
- deleted_entities = []
- _deleted_entities = [id for id in self.deleted_entities]
-
- while True:
- if not _deleted_entities:
- break
- _ready = []
- for mongo_id in _deleted_entities:
- ent = self.avalon_ents_by_id[mongo_id]
- vis_par = ent["data"]["visualParent"]
- if (
- vis_par is not None and
- str(vis_par) in self.deleted_entities
- ):
- continue
- _ready.append(mongo_id)
-
- for id in _ready:
- deleted_entities.append(id)
- _deleted_entities.remove(id)
-
- delete_ids = []
- for mongo_id in deleted_entities:
- # delete if they are deletable
- if self.changeability_by_mongo_id[mongo_id]:
- delete_ids.append(ObjectId(mongo_id))
- continue
-
- # check if any new created entity match same entity
- # - name and parents must match
- deleted_entity = self.avalon_ents_by_id[mongo_id]
- name = deleted_entity["name"]
- parents = deleted_entity["data"]["parents"]
- similar_ent_id = None
- for ftrack_id in self.create_ftrack_ids:
- _ent_final = self.entities_dict[ftrack_id]["final_entity"]
- if _ent_final["name"] != name:
- continue
- if _ent_final["data"]["parents"] != parents:
- continue
-
- # If in create is "same" then we can "archive" current
- # since will be unarchived in create method
- similar_ent_id = ftrack_id
- break
-
- # If similar entity(same name and parents) is in create
- # entities list then just change from create to update
- if similar_ent_id is not None:
- self.create_ftrack_ids.remove(similar_ent_id)
- self.update_ftrack_ids.append(similar_ent_id)
- self.avalon_ftrack_mapper[mongo_id] = similar_ent_id
- self.ftrack_avalon_mapper[similar_ent_id] = mongo_id
- continue
-
- found_by_name_id = None
- for ftrack_id, ent_dict in self.entities_dict.items():
- if not ent_dict.get("name"):
- continue
-
- if name == ent_dict["name"]:
- found_by_name_id = ftrack_id
- break
-
- if found_by_name_id is not None:
- # * THESE conditins are too complex to implement in first stage
- # - probably not possible to solve if this happen
- # if found_by_name_id in self.create_ftrack_ids:
- # # reparent entity of the new one create?
- # pass
- #
- # elif found_by_name_id in self.update_ftrack_ids:
- # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id]
- #
- # ent_dict = self.entities_dict[found_by_name_id]
-
- # TODO report - CRITICAL entity with same name alread exists in
- # different hierarchy - can't recreate entity
- continue
-
- _vis_parent = str(deleted_entity["data"]["visualParent"])
- if _vis_parent is None:
- _vis_parent = self.avalon_project_id
- ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent]
- self.create_ftrack_ent_from_avalon_ent(
- deleted_entity, ftrack_parent_id
- )
-
- filter = {"_id": {"$in": delete_ids}, "type": "asset"}
- self.dbcon.update_many(filter, {"$set": {"type": "archived_asset"}})
-
- def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id):
- new_entity = None
- parent_entity = self.entities_dict[parent_id]["entity"]
-
- _name = av_entity["name"]
- _type = av_entity["data"].get("entityType", "folder")
-
- self.log.debug((
- "Re-ceating deleted entity {} <{}>"
- ).format(_name, _type))
-
- new_entity = self.session.create(_type, {
- "name": _name,
- "parent": parent_entity
- })
-
- final_entity = {}
- for k, v in av_entity.items():
- final_entity[k] = v
-
- if final_entity.get("type") != "asset":
- final_entity["type"] = "asset"
-
- new_entity_id = new_entity["id"]
- new_entity_data = {
- "entity": new_entity,
- "parent_id": parent_id,
- "entity_type": _type.lower(),
- "entity_type_orig": _type,
- "name": _name,
- "final_entity": final_entity
- }
- for k, v in new_entity_data.items():
- self.entities_dict[new_entity_id][k] = v
-
- p_chilren = self.entities_dict[parent_id]["children"]
- if new_entity_id not in p_chilren:
- self.entities_dict[parent_id]["children"].append(new_entity_id)
-
- cust_attr, hier_attrs = self.get_avalon_attr()
- for _attr in cust_attr:
- key = _attr["key"]
- if key not in av_entity["data"]:
- continue
-
- if key not in new_entity["custom_attributes"]:
- continue
-
- value = av_entity["data"][key]
- if not value:
- continue
-
- new_entity["custom_attributes"][key] = value
-
- av_entity_id = str(av_entity["_id"])
- new_entity["custom_attributes"][self.id_cust_attr] = av_entity_id
-
- self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
- self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
-
- self.session.commit()
-
- ent_path = self.get_ent_path(new_entity_id)
- msg = (
- "Deleted entity was recreated because it or its children"
- " contain published data"
- )
-
- self.report_items["info"][msg].append(ent_path)
-
- return new_entity_id
-
- def regex_duplicate_interface(self):
- items = []
- if self.failed_regex or self.tasks_failed_regex:
- subtitle = "Entity names contain prohibited symbols:"
- items.append({
- "type": "label",
- "value": "# {}".format(subtitle)
- })
- items.append({
- "type": "label",
- "value": (
- "NOTE: You can use Letters( a-Z ),"
- " Numbers( 0-9 ) and Underscore( _ )\n"
- )
- })
- log_msgs = []
- for name, ids in self.failed_regex.items():
- error_title = {
- "type": "label",
- "value": "## {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}\n'.format("\n".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
-
- for name, ids in self.tasks_failed_regex.items():
- error_title = {
- "type": "label",
- "value": "## Task: {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- ent_path = "/".join([ent_path, name])
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}\n'.format("\n".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
-
- self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
-
- if self.duplicates:
- subtitle = "Duplicated entity names:"
- items.append({
- "type": "label",
- "value": "# {}".format(subtitle)
- })
- items.append({
- "type": "label",
- "value": (
- "NOTE: It is not allowed to use the same name"
- " for multiple entities in the same project\n"
- )
- })
- log_msgs = []
- for name, ids in self.duplicates.items():
- error_title = {
- "type": "label",
- "value": "## {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}\n'.format("\n".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ", ".join(paths)))
-
- self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
-
- return items
-
- def get_avalon_attr(self, split_hierarchical=True):
- custom_attributes = []
- hier_custom_attributes = []
- cust_attrs_query = (
- "select id, entity_type, object_type_id, is_hierarchical, default"
- " from CustomAttributeConfiguration"
- " where group.name = \"avalon\""
- )
- all_avalon_attr = self.session.query(cust_attrs_query).all()
- for cust_attr in all_avalon_attr:
- if split_hierarchical and cust_attr["is_hierarchical"]:
- hier_custom_attributes.append(cust_attr)
- continue
-
- custom_attributes.append(cust_attr)
-
- if split_hierarchical:
- # return tuple
- return custom_attributes, hier_custom_attributes
-
- return custom_attributes
-
- def report(self):
- items = []
- project_name = self.entities_dict[self.ft_project_id]["name"]
- title = "Synchronization report ({}):".format(project_name)
-
- keys = ["error", "warning", "info"]
- for key in keys:
- subitems = []
- if key == "warning":
- for _item in self.regex_duplicate_interface():
- subitems.append(_item)
-
- for msg, _items in self.report_items[key].items():
- if not _items:
- continue
-
- subitems.append({
- "type": "label",
- "value": "# {}".format(msg)
- })
- if isinstance(_items, str):
- _items = [_items]
- subitems.append({
- "type": "label",
- "value": '{}\n'.format("\n".join(_items))
- })
-
- if items and subitems:
- items.append(self.report_splitter)
-
- items.extend(subitems)
-
- return {
- "items": items,
- "title": title,
- "success": False,
- "message": "Synchronization Finished"
- }
+from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory
class SyncToAvalonLocal(BaseAction):
@@ -2191,6 +47,10 @@ class SyncToAvalonLocal(BaseAction):
os.environ.get('PYPE_STATICS_SERVER', '')
)
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.entities_factory = SyncEntitiesFactory(self.log, self.session)
+
def discover(self, session, entities, event):
''' Validation '''
for ent in event["data"]["selection"]:
@@ -2210,28 +70,26 @@ class SyncToAvalonLocal(BaseAction):
ft_project_name = in_entities[0]["project"]["full_name"]
try:
- entities_factory = SyncEntitiesFactory(
- self.log, session, ft_project_name
- )
+ self.entities_factory.launch_setup(ft_project_name)
time_1 = time.time()
- entities_factory.set_cutom_attributes()
+ self.entities_factory.set_cutom_attributes()
time_2 = time.time()
# This must happen before all filtering!!!
- entities_factory.prepare_avalon_entities(ft_project_name)
+ self.entities_factory.prepare_avalon_entities(ft_project_name)
time_3 = time.time()
- entities_factory.filter_by_ignore_sync()
+ self.entities_factory.filter_by_ignore_sync()
time_4 = time.time()
- entities_factory.duplicity_regex_check()
+ self.entities_factory.duplicity_regex_check()
time_5 = time.time()
- entities_factory.prepare_ftrack_ent_data()
+ self.entities_factory.prepare_ftrack_ent_data()
time_6 = time.time()
- entities_factory.synchronize()
+ self.entities_factory.synchronize()
time_7 = time.time()
self.log.debug(
@@ -2262,7 +120,7 @@ class SyncToAvalonLocal(BaseAction):
"* Total time: {}".format(time_7 - time_start)
)
- report = entities_factory.report()
+ report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
ft_project_name
@@ -2304,13 +162,13 @@ class SyncToAvalonLocal(BaseAction):
report = {"items": []}
try:
- report = entities_factory.report()
+ report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items", [])
if _items:
- items.append(entities_factory.report_splitter)
+ items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event)
@@ -2319,12 +177,12 @@ class SyncToAvalonLocal(BaseAction):
finally:
try:
- entities_factory.dbcon.uninstall()
+ self.entities_factory.dbcon.uninstall()
except Exception:
pass
try:
- entities_factory.session.close()
+ self.entities_factory.session.close()
except Exception:
pass
diff --git a/pype/ftrack/actions/action_where_run_ask.py b/pype/ftrack/actions/action_where_run_ask.py
index 7fc08c1f68..a28f32f407 100644
--- a/pype/ftrack/actions/action_where_run_ask.py
+++ b/pype/ftrack/actions/action_where_run_ask.py
@@ -1,7 +1,5 @@
import os
-import ftrack_api
from pype.ftrack import BaseAction
-from ftrack_api import session as fa_session
class ActionAskWhereIRun(BaseAction):
diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py
index 217fef4c07..9f9deeab95 100644
--- a/pype/ftrack/events/action_sync_to_avalon.py
+++ b/pype/ftrack/events/action_sync_to_avalon.py
@@ -1,2145 +1,10 @@
import os
-import collections
-import re
-import queue
import time
-import toml
import traceback
-from bson.objectid import ObjectId
-from bson.errors import InvalidId
-from pymongo import UpdateOne
-
-import avalon
from pype.ftrack import BaseAction
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-import ftrack_api
-from ftrack_api import session as fa_session
-from pypeapp import Anatomy, config
-
-
-class SyncEntitiesFactory:
- dbcon = DbConnector()
-
- project_query = (
- "select full_name, name, custom_attributes"
- ", project_schema._task_type_schema.types.name"
- " from Project where full_name is \"{}\""
- )
- entities_query = (
- "select id, name, parent_id, link"
- " from TypedContext where project_id is \"{}\""
- )
- ignore_custom_attr_key = "avalon_ignore_sync"
- id_cust_attr = "avalon_mongo_id"
-
- entity_schemas = {
- "project": "avalon-core:project-2.0",
- "asset": "avalon-core:asset-3.0",
- "config": "avalon-core:config-1.0"
- }
-
- report_splitter = {"type": "label", "value": "---"}
-
- def __init__(self, log_obj, _session, project_full_name):
- self.log = log_obj
- self.session = ftrack_api.Session(
- server_url=_session.server_url,
- api_key=_session.api_key,
- api_user=_session.api_user,
- auto_connect_event_hub=True
- )
-
- self.cancel_auto_sync = False
-
- self.schema_patterns = {}
- self.duplicates = {}
- self.failed_regex = {}
- self.tasks_failed_regex = collections.defaultdict(list)
- self.report_items = {
- "info": collections.defaultdict(list),
- "warning": collections.defaultdict(list),
- "error": collections.defaultdict(list)
- }
-
- self.create_list = []
- self.recreated_ftrack_ents = {}
- self.updates = collections.defaultdict(dict)
-
- self._avalon_ents_by_id = None
- self._avalon_ents_by_ftrack_id = None
- self._avalon_ents_by_name = None
- self._avalon_ents_by_parent_id = None
-
- self._avalon_archived_ents = None
- self._avalon_archived_by_id = None
- self._avalon_archived_by_parent_id = None
- self._avalon_archived_by_name = None
-
- self._subsets_by_parent_id = None
- self._changeability_by_mongo_id = None
-
- self.all_filtered_entities = {}
- # self.all_filtered_ids = []
- self.filtered_ids = []
- self.not_selected_ids = []
-
- self._ent_pats_by_ftrack_id = {}
-
- # Get Ftrack project
- ft_project = self.session.query(
- self.project_query.format(project_full_name)
- ).one()
- ft_project_id = ft_project["id"]
-
- # Skip if project is ignored
- if ft_project["custom_attributes"].get(
- self.ignore_custom_attr_key
- ) is True:
- msg = (
- "Project \"{}\" has set `Ignore Sync` custom attribute to True"
- ).format(project_full_name)
- self.log.warning(msg)
- return {"success": False, "message": msg}
-
- # Check if `avalon_mongo_id` custom attribute exist or is accessible
- if self.id_cust_attr not in ft_project["custom_attributes"]:
- items = []
- items.append({
- "type": "label",
- "value": "# Can't access Custom attribute <{}>".format(
- self.id_cust_attr
- )
- })
- items.append({
- "type": "label",
- "value": (
- "- Check if user \"{}\" has permissions"
- " to access the Custom attribute
"
- ).format(_session.api_key)
- })
- items.append({
- "type": "label",
- "value": "- Check if the Custom attribute exist
"
- })
- return {
- "items": items,
- "title": "Synchronization failed",
- "success": False,
- "message": "Synchronization failed"
- }
-
- # Find all entities in project
- all_project_entities = self.session.query(
- self.entities_query.format(ft_project_id)
- ).all()
-
- # Store entities by `id` and `parent_id`
- entities_dict = collections.defaultdict(lambda: {
- "children": list(),
- "parent_id": None,
- "entity": None,
- "entity_type": None,
- "name": None,
- "custom_attributes": {},
- "hier_attrs": {},
- "avalon_attrs": {},
- "tasks": []
- })
-
- for entity in all_project_entities:
- parent_id = entity["parent_id"]
- entity_type = entity.entity_type
- entity_type_low = entity_type.lower()
- if entity_type_low == "task":
- entities_dict[parent_id]["tasks"].append(entity["name"])
- continue
-
- entity_id = entity["id"]
- entities_dict[entity_id].update({
- "entity": entity,
- "parent_id": parent_id,
- "entity_type": entity_type_low,
- "entity_type_orig": entity_type,
- "name": entity["name"]
- })
- entities_dict[parent_id]["children"].append(entity_id)
-
- entities_dict[ft_project_id]["entity"] = ft_project
- entities_dict[ft_project_id]["entity_type"] = (
- ft_project.entity_type.lower()
- )
- entities_dict[ft_project_id]["entity_type_orig"] = (
- ft_project.entity_type
- )
- entities_dict[ft_project_id]["name"] = ft_project["full_name"]
-
- self.ft_project_id = ft_project_id
- self.entities_dict = entities_dict
-
- @property
- def avalon_ents_by_id(self):
- if self._avalon_ents_by_id is None:
- self._avalon_ents_by_id = {}
- for entity in self.avalon_entities:
- self._avalon_ents_by_id[str(entity["_id"])] = entity
-
- return self._avalon_ents_by_id
-
- @property
- def avalon_ents_by_ftrack_id(self):
- if self._avalon_ents_by_ftrack_id is None:
- self._avalon_ents_by_ftrack_id = {}
- for entity in self.avalon_entities:
- key = entity.get("data", {}).get("ftrackId")
- if not key:
- continue
- self._avalon_ents_by_ftrack_id[key] = str(entity["_id"])
-
- return self._avalon_ents_by_ftrack_id
-
- @property
- def avalon_ents_by_name(self):
- if self._avalon_ents_by_name is None:
- self._avalon_ents_by_name = {}
- for entity in self.avalon_entities:
- self._avalon_ents_by_name[entity["name"]] = str(entity["_id"])
-
- return self._avalon_ents_by_name
-
- @property
- def avalon_ents_by_parent_id(self):
- if self._avalon_ents_by_parent_id is None:
- self._avalon_ents_by_parent_id = collections.defaultdict(list)
- for entity in self.avalon_entities:
- parent_id = entity["data"]["visualParent"]
- if parent_id is not None:
- parent_id = str(parent_id)
- self._avalon_ents_by_parent_id[parent_id].append(entity)
-
- return self._avalon_ents_by_parent_id
-
- @property
- def avalon_archived_ents(self):
- if self._avalon_archived_ents is None:
- self._avalon_archived_ents = [
- ent for ent in self.dbcon.find({"type": "archived_asset"})
- ]
- return self._avalon_archived_ents
-
- @property
- def avalon_archived_by_name(self):
- if self._avalon_archived_by_name is None:
- self._avalon_archived_by_name = collections.defaultdict(list)
- for ent in self.avalon_archived_ents:
- self._avalon_archived_by_name[ent["name"]].append(ent)
- return self._avalon_archived_by_name
-
- @property
- def avalon_archived_by_id(self):
- if self._avalon_archived_by_id is None:
- self._avalon_archived_by_id = {
- str(ent["_id"]): ent for ent in self.avalon_archived_ents
- }
- return self._avalon_archived_by_id
-
- @property
- def avalon_archived_by_parent_id(self):
- if self._avalon_archived_by_parent_id is None:
- self._avalon_archived_by_parent_id = collections.defaultdict(list)
- for entity in self.avalon_archived_ents:
- parent_id = entity["data"]["visualParent"]
- if parent_id is not None:
- parent_id = str(parent_id)
- self._avalon_archived_by_parent_id[parent_id].append(entity)
-
- return self._avalon_archived_by_parent_id
-
- @property
- def subsets_by_parent_id(self):
- if self._subsets_by_parent_id is None:
- self._subsets_by_parent_id = collections.defaultdict(list)
- for subset in self.dbcon.find({"type": "subset"}):
- self._subsets_by_parent_id[str(subset["parent"])].append(
- subset
- )
-
- return self._subsets_by_parent_id
-
- @property
- def changeability_by_mongo_id(self):
- if self._changeability_by_mongo_id is None:
- self._changeability_by_mongo_id = collections.defaultdict(
- lambda: True
- )
- self._changeability_by_mongo_id[self.avalon_project_id] = False
- self._bubble_changeability(list(self.subsets_by_parent_id.keys()))
- return self._changeability_by_mongo_id
-
- @property
- def all_ftrack_names(self):
- return [
- ent_dict["name"] for ent_dict in self.entities_dict.values() if (
- ent_dict.get("name")
- )
- ]
-
- def duplicity_regex_check(self):
- self.log.debug("* Checking duplicities and invalid symbols")
- # Duplicity and regex check
- entity_ids_by_name = {}
- duplicates = []
- failed_regex = []
- task_names = {}
- for ftrack_id, entity_dict in self.entities_dict.items():
- regex_check = True
- name = entity_dict["name"]
- entity_type = entity_dict["entity_type"]
- # Tasks must be checked too
- for task_name in entity_dict["tasks"]:
- passed = task_names.get(task_name)
- if passed is None:
- passed = self.check_regex(task_name, "task")
- task_names[task_name] = passed
-
- if not passed:
- self.tasks_failed_regex[task_name].append(ftrack_id)
-
- if name in entity_ids_by_name:
- duplicates.append(name)
- else:
- entity_ids_by_name[name] = []
- regex_check = self.check_regex(name, entity_type)
-
- entity_ids_by_name[name].append(ftrack_id)
- if not regex_check:
- failed_regex.append(name)
-
- for name in failed_regex:
- self.failed_regex[name] = entity_ids_by_name[name]
-
- for name in duplicates:
- self.duplicates[name] = entity_ids_by_name[name]
-
- self.filter_by_duplicate_regex()
-
- def check_regex(self, name, entity_type, in_schema=None):
- schema_name = "asset-3.0"
- if in_schema:
- schema_name = in_schema
- elif entity_type == "project":
- schema_name = "project-2.0"
- elif entity_type == "task":
- schema_name = "task"
-
- name_pattern = self.schema_patterns.get(schema_name)
- if not name_pattern:
- default_pattern = "^[a-zA-Z0-9_.]*$"
- schema_obj = avalon.schema._cache.get(schema_name + ".json")
- if not schema_obj:
- name_pattern = default_pattern
- else:
- name_pattern = schema_obj.get(
- "properties", {}).get(
- "name", {}).get(
- "pattern", default_pattern
- )
- self.schema_patterns[schema_name] = name_pattern
-
- if re.match(name_pattern, name):
- return True
- return False
-
- def filter_by_duplicate_regex(self):
- filter_queue = queue.Queue()
- failed_regex_msg = "{} - Entity has invalid symbol/s in name"
- duplicate_msg = "Multiple entities have name \"{}\":"
-
- for ids in self.failed_regex.values():
- for id in ids:
- ent_path = self.get_ent_path(id)
- self.log.warning(failed_regex_msg.format(ent_path))
- filter_queue.put(id)
-
- for name, ids in self.duplicates.items():
- self.log.warning(duplicate_msg.format(name))
- for id in ids:
- ent_path = self.get_ent_path(id)
- self.log.warning(ent_path)
- filter_queue.put(id)
-
- filtered_ids = []
- while not filter_queue.empty():
- ftrack_id = filter_queue.get()
- if ftrack_id in filtered_ids:
- continue
-
- entity_dict = self.entities_dict.pop(ftrack_id, {})
- if not entity_dict:
- continue
-
- self.all_filtered_entities[ftrack_id] = entity_dict
- parent_id = entity_dict.get("parent_id")
- if parent_id and parent_id in self.entities_dict:
- if ftrack_id in self.entities_dict[parent_id]["children"]:
- self.entities_dict[parent_id]["children"].remove(ftrack_id)
-
- filtered_ids.append(ftrack_id)
- for child_id in entity_dict.get("children", []):
- filter_queue.put(child_id)
-
- # self.all_filtered_ids.extend(filtered_ids)
-
- for name, ids in self.tasks_failed_regex.items():
- for id in ids:
- if id not in self.entities_dict:
- continue
- self.entities_dict[id]["tasks"].remove(name)
- ent_path = self.get_ent_path(id)
- self.log.warning(failed_regex_msg.format(
- "/".join([ent_path, name])
- ))
-
- def filter_by_ignore_sync(self):
- # skip filtering if `ignore_sync` attribute do not exist
- if self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
- self.ignore_custom_attr_key, "_notset_"
- ) == "_notset_":
- return
-
- self.filter_queue = queue.Queue()
- self.filter_queue.put((self.ft_project_id, False))
- while not self.filter_queue.empty():
- parent_id, remove = self.filter_queue.get()
- if remove:
- parent_dict = self.entities_dict.pop(parent_id, {})
- self.all_filtered_entities[parent_id] = parent_dict
- self.filtered_ids.append(parent_id)
- else:
- parent_dict = self.entities_dict.get(parent_id, {})
-
- for child_id in parent_dict.get("children", []):
- # keep original `remove` value for all childs
- _remove = (remove is True)
- if not _remove:
- if self.entities_dict[child_id]["avalon_attrs"].get(
- self.ignore_custom_attr_key
- ):
- self.entities_dict[parent_id]["children"].remove(
- child_id
- )
- _remove = True
- self.filter_queue.put((child_id, _remove))
-
- # self.all_filtered_ids.extend(self.filtered_ids)
-
- def filter_by_selection(self, event):
- # BUGGY!!!! cause that entities are in deleted list
- # TODO may be working when filtering happen after preparations
- # - But this part probably does not have any functional reason
- # - Time of synchronization probably won't be changed much
- selected_ids = []
- for entity in event["data"]["selection"]:
- # Skip if project is in selection
- if entity["entityType"] == "show":
- return
- selected_ids.append(entity["entityId"])
-
- sync_ids = [self.ft_project_id]
- parents_queue = queue.Queue()
- children_queue = queue.Queue()
- for id in selected_ids:
- # skip if already filtered with ignore sync custom attribute
- if id in self.filtered_ids:
- continue
-
- parents_queue.put(id)
- children_queue.put(id)
-
- while not parents_queue.empty():
- id = parents_queue.get()
- while True:
- # Stops when parent is in sync_ids
- if id in self.filtered_ids or id in sync_ids or id is None:
- break
- sync_ids.append(id)
- id = self.entities_dict[id]["parent_id"]
-
- while not children_queue.empty():
- parent_id = children_queue.get()
- for child_id in self.entities_dict[parent_id]["children"]:
- if child_id in sync_ids or child_id in self.filtered_ids:
- continue
- sync_ids.append(child_id)
- children_queue.put(child_id)
-
- # separate not selected and to process entities
- for key, value in self.entities_dict.items():
- if key not in sync_ids:
- self.not_selected_ids.append(key)
-
- for id in self.not_selected_ids:
- # pop from entities
- value = self.entities_dict.pop(id)
- # remove entity from parent's children
- parent_id = value["parent_id"]
- if parent_id not in sync_ids:
- continue
-
- self.entities_dict[parent_id]["children"].remove(id)
-
- def set_cutom_attributes(self):
- self.log.debug("* Preparing custom attributes")
- # Get custom attributes and values
- custom_attrs, hier_attrs = self.get_avalon_attr(True)
- ent_types = self.session.query("select id, name from ObjectType").all()
- ent_types_by_name = {
- ent_type["name"]: ent_type["id"] for ent_type in ent_types
- }
-
- attrs = set()
- # store default values per entity type
- attrs_per_entity_type = collections.defaultdict(dict)
- avalon_attrs = collections.defaultdict(dict)
- # store also custom attribute configuration id for future use (create)
- attrs_per_entity_type_ca_id = collections.defaultdict(dict)
- avalon_attrs_ca_id = collections.defaultdict(dict)
-
- for cust_attr in custom_attrs:
- key = cust_attr["key"]
- attrs.add(key)
- ca_ent_type = cust_attr["entity_type"]
- if key.startswith("avalon_"):
- if ca_ent_type == "show":
- avalon_attrs[ca_ent_type][key] = cust_attr["default"]
- avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
- else:
- obj_id = cust_attr["object_type_id"]
- avalon_attrs[obj_id][key] = cust_attr["default"]
- avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
- continue
-
- if ca_ent_type == "show":
- attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
- attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
- else:
- obj_id = cust_attr["object_type_id"]
- attrs_per_entity_type[obj_id][key] = cust_attr["default"]
- attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]
-
- obj_id_ent_type_map = {}
- sync_ids = []
- for entity_id, entity_dict in self.entities_dict.items():
- sync_ids.append(entity_id)
- entity_type = entity_dict["entity_type"]
- entity_type_orig = entity_dict["entity_type_orig"]
-
- if entity_type == "project":
- attr_key = "show"
- else:
- map_key = obj_id_ent_type_map.get(entity_type_orig)
- if not map_key:
- # Put space between capitals
- # (e.g. 'AssetBuild' -> 'Asset Build')
- map_key = re.sub(
- r"(\w)([A-Z])", r"\1 \2", entity_type_orig
- )
- obj_id_ent_type_map[entity_type_orig] = map_key
-
- # Get object id of entity type
- attr_key = ent_types_by_name.get(map_key)
-
- # Backup soluction when id is not found by prequeried objects
- if not attr_key:
- query = "ObjectType where name is \"{}\"".format(map_key)
- attr_key = self.session.query(query).one()["id"]
- ent_types_by_name[map_key] = attr_key
-
- prepared_attrs = attrs_per_entity_type.get(attr_key)
- prepared_avalon_attr = avalon_attrs.get(attr_key)
- prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key)
- prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key)
- if prepared_attrs:
- self.entities_dict[entity_id]["custom_attributes"] = (
- prepared_attrs.copy()
- )
- if prepared_attrs_ca_id:
- self.entities_dict[entity_id]["custom_attributes_id"] = (
- prepared_attrs_ca_id.copy()
- )
- if prepared_avalon_attr:
- self.entities_dict[entity_id]["avalon_attrs"] = (
- prepared_avalon_attr.copy()
- )
- if prepared_avalon_attr_ca_id:
- self.entities_dict[entity_id]["avalon_attrs_id"] = (
- prepared_avalon_attr_ca_id.copy()
- )
-
- # TODO query custom attributes by entity_id
- entity_ids_joined = ", ".join([
- "\"{}\"".format(id) for id in sync_ids
- ])
- attributes_joined = ", ".join([
- "\"{}\"".format(name) for name in attrs
- ])
-
- cust_attr_query = (
- "select value, entity_id from ContextCustomAttributeValue "
- "where entity_id in ({}) and configuration.key in ({})"
- )
- [values] = self.session._call([{
- "action": "query",
- "expression": cust_attr_query.format(
- entity_ids_joined, attributes_joined
- )
- }])
-
- for value in values["data"]:
- entity_id = value["entity_id"]
- key = value["configuration"]["key"]
- store_key = "custom_attributes"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
- self.entities_dict[entity_id][store_key][key] = value["value"]
-
- # process hierarchical attributes
- self.set_hierarchical_attribute(hier_attrs, sync_ids)
-
- def set_hierarchical_attribute(self, hier_attrs, sync_ids):
- # collect all hierarchical attribute keys
- # and prepare default values to project
- attribute_names = []
- for attr in hier_attrs:
- key = attr["key"]
- attribute_names.append(key)
-
- store_key = "hier_attrs"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
-
- self.entities_dict[self.ft_project_id][store_key][key] = (
- attr["default"]
- )
-
- # Prepare dict with all hier keys and None values
- prepare_dict = {}
- prepare_dict_avalon = {}
- for attr in attribute_names:
- if attr.startswith("avalon_"):
- prepare_dict_avalon[attr] = None
- else:
- prepare_dict[attr] = None
-
- for id, entity_dict in self.entities_dict.items():
- # Skip project because has stored defaults at the moment
- if entity_dict["entity_type"] == "project":
- continue
- entity_dict["hier_attrs"] = prepare_dict.copy()
- for key, val in prepare_dict_avalon.items():
- entity_dict["avalon_attrs"][key] = val
-
- # Prepare values to query
- entity_ids_joined = ", ".join([
- "\"{}\"".format(id) for id in sync_ids
- ])
- attributes_joined = ", ".join([
- "\"{}\"".format(name) for name in attribute_names
- ])
- [values] = self.session._call([{
- "action": "query",
- "expression": (
- "select value, entity_id from ContextCustomAttributeValue "
- "where entity_id in ({}) and configuration.key in ({})"
- ).format(entity_ids_joined, attributes_joined)
- }])
-
- avalon_hier = []
- for value in values["data"]:
- if value["value"] is None:
- continue
- entity_id = value["entity_id"]
- key = value["configuration"]["key"]
- store_key = "hier_attrs"
- if key.startswith("avalon_"):
- store_key = "avalon_attrs"
- avalon_hier.append(key)
- self.entities_dict[entity_id][store_key][key] = value["value"]
-
- # Get dictionary with not None hierarchical values to pull to childs
- top_id = self.ft_project_id
- project_values = {}
- for key, value in self.entities_dict[top_id]["hier_attrs"].items():
- if value is not None:
- project_values[key] = value
-
- for key in avalon_hier:
- value = self.entities_dict[top_id]["avalon_attrs"][key]
- if value is not None:
- project_values[key] = value
-
- hier_down_queue = queue.Queue()
- hier_down_queue.put((project_values, top_id))
-
- while not hier_down_queue.empty():
- hier_values, parent_id = hier_down_queue.get()
- for child_id in self.entities_dict[parent_id]["children"]:
- _hier_values = hier_values.copy()
- for name in attribute_names:
- store_key = "hier_attrs"
- if name.startswith("avalon_"):
- store_key = "avalon_attrs"
- value = self.entities_dict[child_id][store_key][name]
- if value is not None:
- _hier_values[name] = value
-
- self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
- hier_down_queue.put((_hier_values, child_id))
-
- def remove_from_archived(self, mongo_id):
- entity = self.avalon_archived_by_id.pop(mongo_id, None)
- if not entity:
- return
-
- if self._avalon_archived_ents is not None:
- if entity in self._avalon_archived_ents:
- self._avalon_archived_ents.remove(entity)
-
- if self._avalon_archived_by_name is not None:
- name = entity["name"]
- if name in self._avalon_archived_by_name:
- name_ents = self._avalon_archived_by_name[name]
- if entity in name_ents:
- if len(name_ents) == 1:
- self._avalon_archived_by_name.pop(name)
- else:
- self._avalon_archived_by_name[name].remove(entity)
-
- # TODO use custom None instead of __NOTSET__
- if self._avalon_archived_by_parent_id is not None:
- parent_id = entity.get("data", {}).get(
- "visualParent", "__NOTSET__"
- )
- if parent_id is not None:
- parent_id = str(parent_id)
-
- if parent_id in self._avalon_archived_by_parent_id:
- parent_list = self._avalon_archived_by_parent_id[parent_id]
- if entity not in parent_list:
- self._avalon_archived_by_parent_id[parent_id].remove(
- entity
- )
-
- def prepare_ftrack_ent_data(self):
- not_set_ids = []
- for id, entity_dict in self.entities_dict.items():
- entity = entity_dict["entity"]
- if entity is None:
- not_set_ids.append(id)
- continue
-
- self.entities_dict[id]["final_entity"] = {}
- self.entities_dict[id]["final_entity"]["name"] = (
- entity_dict["name"]
- )
- data = {}
- data["ftrackId"] = entity["id"]
- data["entityType"] = entity_dict["entity_type_orig"]
-
- for key, val in entity_dict.get("custom_attributes", []).items():
- data[key] = val
-
- for key, val in entity_dict.get("hier_attrs", []).items():
- data[key] = val
-
- if id == self.ft_project_id:
- data["code"] = entity["name"]
- self.entities_dict[id]["final_entity"]["data"] = data
- self.entities_dict[id]["final_entity"]["type"] = "project"
-
- proj_schema = entity["project_schema"]
- task_types = proj_schema["_task_type_schema"]["types"]
- self.entities_dict[id]["final_entity"]["config"] = {
- "tasks": [{"name": tt["name"]} for tt in task_types],
- "apps": self.get_project_apps(data)
- }
- continue
-
- ent_path_items = [ent["name"] for ent in entity["link"]]
- parents = ent_path_items[1:len(ent_path_items)-1:]
- hierarchy = ""
- if len(parents) > 0:
- hierarchy = os.path.sep.join(parents)
-
- data["parents"] = parents
- data["hierarchy"] = hierarchy
- data["tasks"] = self.entities_dict[id].pop("tasks", [])
- self.entities_dict[id]["final_entity"]["data"] = data
- self.entities_dict[id]["final_entity"]["type"] = "asset"
-
- if not_set_ids:
- self.log.debug((
- "- Debug information: Filtering bug, in entities dict are "
- "empty dicts (function should not affect) <{}>"
- ).format("| ".join(not_set_ids)))
- for id in not_set_ids:
- self.entities_dict.pop(id)
-
- def get_project_apps(self, proj_data):
- apps = []
- missing_toml_msg = "Missing config file for application"
- error_msg = (
- "Unexpected error happend during preparation of application"
- )
- for app in proj_data.get("applications"):
- try:
- toml_path = avalon.lib.which_app(app)
- # TODO report
- if not toml_path:
- self.log.warning(missing_toml_msg + '"{}"'.format(app))
- self.report_items["warning"][missing_toml_msg].append(app)
- continue
-
- apps.append({
- "name": app,
- "label": toml.load(toml_path)["label"]
- })
- except Exception:
- # TODO report
- self.report_items["warning"][error_msg].append(app)
- self.log.warning((
- "Error has happened during preparing application \"{}\""
- ).format(app), exc_info=True)
- return apps
-
- def get_ent_path(self, ftrack_id):
- ent_path = self._ent_pats_by_ftrack_id.get(ftrack_id)
- if not ent_path:
- entity = self.entities_dict[ftrack_id]["entity"]
- ent_path = "/".join(
- [ent["name"] for ent in entity["link"]]
- )
- self._ent_pats_by_ftrack_id[ftrack_id] = ent_path
-
- return ent_path
-
- def prepare_avalon_entities(self, ft_project_name):
- self.log.debug((
- "* Preparing avalon entities "
- "(separate to Create, Update and Deleted groups)"
- ))
- # Avalon entities
- self.dbcon.install()
- self.dbcon.Session["AVALON_PROJECT"] = ft_project_name
- avalon_project = self.dbcon.find_one({"type": "project"})
- avalon_entities = self.dbcon.find({"type": "asset"})
- self.avalon_project = avalon_project
- self.avalon_entities = avalon_entities
-
- ftrack_avalon_mapper = {}
- avalon_ftrack_mapper = {}
- create_ftrack_ids = []
- update_ftrack_ids = []
-
- same_mongo_id = []
- all_mongo_ids = {}
- for ftrack_id, entity_dict in self.entities_dict.items():
- mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
- if not mongo_id:
- continue
- if mongo_id in all_mongo_ids:
- same_mongo_id.append(mongo_id)
- else:
- all_mongo_ids[mongo_id] = []
- all_mongo_ids[mongo_id].append(ftrack_id)
-
- if avalon_project:
- mongo_id = str(avalon_project["_id"])
- ftrack_avalon_mapper[self.ft_project_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = self.ft_project_id
- update_ftrack_ids.append(self.ft_project_id)
- else:
- create_ftrack_ids.append(self.ft_project_id)
-
- # make it go hierarchically
- prepare_queue = queue.Queue()
-
- for child_id in self.entities_dict[self.ft_project_id]["children"]:
- prepare_queue.put(child_id)
-
- while not prepare_queue.empty():
- ftrack_id = prepare_queue.get()
- for child_id in self.entities_dict[ftrack_id]["children"]:
- prepare_queue.put(child_id)
-
- entity_dict = self.entities_dict[ftrack_id]
- ent_path = self.get_ent_path(ftrack_id)
-
- mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
- av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
- if av_ent_by_mongo_id:
- av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
- "ftrackId"
- )
- is_right = False
- else_match_better = False
- if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id:
- is_right = True
-
- elif mongo_id not in same_mongo_id:
- is_right = True
-
- else:
- ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id]
- for _ftrack_id in ftrack_ids_with_same_mongo:
- if _ftrack_id == av_ent_ftrack_id:
- continue
-
- _entity_dict = self.entities_dict[_ftrack_id]
- _mongo_id = _entity_dict["avalon_attrs"][
- self.id_cust_attr
- ]
- _av_ent_by_mongo_id = self.avalon_ents_by_id.get(
- _mongo_id
- )
- _av_ent_ftrack_id = _av_ent_by_mongo_id.get(
- "data", {}
- ).get("ftrackId")
- if _av_ent_ftrack_id == ftrack_id:
- else_match_better = True
- break
-
- if not is_right and not else_match_better:
- entity = entity_dict["entity"]
- ent_path_items = [ent["name"] for ent in entity["link"]]
- parents = ent_path_items[1:len(ent_path_items)-1:]
- av_parents = av_ent_by_mongo_id["data"]["parents"]
- if av_parents == parents:
- is_right = True
- else:
- name = entity_dict["name"]
- av_name = av_ent_by_mongo_id["name"]
- if name == av_name:
- is_right = True
-
- if is_right:
- self.log.debug(
- "Existing (by MongoID) <{}>".format(ent_path)
- )
- ftrack_avalon_mapper[ftrack_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = ftrack_id
- update_ftrack_ids.append(ftrack_id)
- continue
-
- mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
- if not mongo_id:
- mongo_id = self.avalon_ents_by_name.get(entity_dict["name"])
- if mongo_id:
- self.log.debug(
- "Existing (by matching name) <{}>".format(ent_path)
- )
- else:
- self.log.debug(
- "Existing (by FtrackID in mongo) <{}>".format(ent_path)
- )
-
- if mongo_id:
- ftrack_avalon_mapper[ftrack_id] = mongo_id
- avalon_ftrack_mapper[mongo_id] = ftrack_id
- update_ftrack_ids.append(ftrack_id)
- continue
-
- self.log.debug("New <{}>".format(ent_path))
- create_ftrack_ids.append(ftrack_id)
-
- deleted_entities = []
- for mongo_id in self.avalon_ents_by_id:
- if mongo_id in avalon_ftrack_mapper:
- continue
- deleted_entities.append(mongo_id)
-
- av_ent = self.avalon_ents_by_id[mongo_id]
- av_ent_path_items = [p for p in av_ent["data"]["parents"]]
- av_ent_path_items.append(av_ent["name"])
- self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
-
- self.ftrack_avalon_mapper = ftrack_avalon_mapper
- self.avalon_ftrack_mapper = avalon_ftrack_mapper
- self.create_ftrack_ids = create_ftrack_ids
- self.update_ftrack_ids = update_ftrack_ids
- self.deleted_entities = deleted_entities
-
- self.log.debug((
- "Ftrack -> Avalon comparation: New <{}> "
- "| Existing <{}> | Deleted <{}>"
- ).format(
- len(create_ftrack_ids),
- len(update_ftrack_ids),
- len(deleted_entities)
- ))
-
- def filter_with_children(self, ftrack_id):
- if ftrack_id not in self.entities_dict:
- return
- ent_dict = self.entities_dict[ftrack_id]
- parent_id = ent_dict["parent_id"]
- self.entities_dict[parent_id]["children"].remove(ftrack_id)
-
- children_queue = queue.Queue()
- children_queue.put(ftrack_id)
- while not children_queue.empty():
- _ftrack_id = children_queue.get()
- entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
- for child_id in entity_dict["children"]:
- children_queue.put(child_id)
-
- def prepare_changes(self):
- self.log.debug("* Preparing changes for avalon/ftrack")
- hierarchy_changing_ids = []
- ignore_keys = collections.defaultdict(list)
-
- update_queue = queue.Queue()
- for ftrack_id in self.update_ftrack_ids:
- update_queue.put(ftrack_id)
-
- while not update_queue.empty():
- ftrack_id = update_queue.get()
- if ftrack_id == self.ft_project_id:
- changes = self.prepare_project_changes()
- if changes:
- self.updates[self.avalon_project_id] = changes
- continue
-
- ftrack_ent_dict = self.entities_dict[ftrack_id]
-
- # *** check parents
- parent_check = False
-
- ftrack_parent_id = ftrack_ent_dict["parent_id"]
- avalon_id = self.ftrack_avalon_mapper[ftrack_id]
- avalon_entity = self.avalon_ents_by_id[avalon_id]
- avalon_parent_id = avalon_entity["data"]["visualParent"]
- if avalon_parent_id is not None:
- avalon_parent_id = str(avalon_parent_id)
-
- ftrack_parent_mongo_id = self.ftrack_avalon_mapper[
- ftrack_parent_id
- ]
-
- # if parent is project
- if (ftrack_parent_mongo_id == avalon_parent_id) or (
- ftrack_parent_id == self.ft_project_id and
- avalon_parent_id is None
- ):
- parent_check = True
-
- # check name
- ftrack_name = ftrack_ent_dict["name"]
- avalon_name = avalon_entity["name"]
- name_check = ftrack_name == avalon_name
-
- # IDEAL STATE: both parent and name check passed
- if parent_check and name_check:
- continue
-
- # If entity is changeable then change values of parent or name
- if self.changeability_by_mongo_id[avalon_id]:
- # TODO logging
- if not parent_check:
- if ftrack_parent_mongo_id == str(self.avalon_project_id):
- new_parent_name = self.entities_dict[
- self.ft_project_id]["name"]
- new_parent_id = None
- else:
- new_parent_name = self.avalon_ents_by_id[
- ftrack_parent_mongo_id]["name"]
- new_parent_id = ObjectId(ftrack_parent_mongo_id)
-
- if avalon_parent_id == str(self.avalon_project_id):
- old_parent_name = self.entities_dict[
- self.ft_project_id]["name"]
- else:
- old_parent_name = self.avalon_ents_by_id[
- ftrack_parent_mongo_id]["name"]
-
- self.updates[avalon_id]["data"] = {
- "visualParent": new_parent_id
- }
- ignore_keys[ftrack_id].append("data.visualParent")
- self.log.debug((
- "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\""
- ).format(avalon_name, old_parent_name, new_parent_name))
-
- if not name_check:
- self.updates[avalon_id]["name"] = ftrack_name
- ignore_keys[ftrack_id].append("name")
- self.log.debug(
- "Avalon entity \"{}\" was renamed to \"{}\"".format(
- avalon_name, ftrack_name
- )
- )
- continue
-
- # parents and hierarchy must be recalculated
- hierarchy_changing_ids.append(ftrack_id)
-
- # Parent is project if avalon_parent_id is set to None
- if avalon_parent_id is None:
- avalon_parent_id = str(self.avalon_project_id)
-
- if not name_check:
- ent_path = self.get_ent_path(ftrack_id)
- # TODO report
- # TODO logging
- self.entities_dict[ftrack_id]["name"] = avalon_name
- self.entities_dict[ftrack_id]["entity"]["name"] = (
- avalon_name
- )
- self.entities_dict[ftrack_id]["final_entity"]["name"] = (
- avalon_name
- )
- self.log.warning("Name was changed back to {} <{}>".format(
- avalon_name, ent_path
- ))
- self._ent_pats_by_ftrack_id.pop(ftrack_id, None)
- msg = (
- " It is not allowed to change"
- " name of entity or it's parents"
- " that already has published context"
- )
- self.report_items["warning"][msg].append(ent_path)
-
- # skip parent oricessing if hierarchy didn't change
- if parent_check:
- continue
-
- # Logic when parenting(hierarchy) has changed and should not
- old_ftrack_parent_id = self.avalon_ftrack_mapper.get(
- avalon_parent_id
- )
-
- # If last ftrack parent id from mongo entity exist then just
- # remap paren_id on entity
- if old_ftrack_parent_id:
- # TODO report
- # TODO logging
- ent_path = self.get_ent_path(ftrack_id)
- msg = (
- " It is not allowed"
- " to change hierarchy of entity or it's parents"
- " that already has published context"
- )
- self.report_items["warning"][msg].append(ent_path)
- self.log.warning((
- "Entity has published context so was moved"
- " back in hierarchy <{}>"
- ).format(ent_path))
- self.entities_dict[ftrack_id]["entity"]["parent_id"] = (
- old_ftrack_parent_id
- )
- self.entities_dict[ftrack_id]["parent_id"] = (
- old_ftrack_parent_id
- )
- self.entities_dict[old_ftrack_parent_id][
- "children"
- ].append(ftrack_id)
-
- continue
-
- old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id)
- if not old_parent_ent:
- old_parent_ent = self.avalon_archived_by_id.get(
- avalon_parent_id
- )
-
- # TODO report
- # TODO logging
- if not old_parent_ent:
- self.log.warning((
- "Parent entity was not found by id"
- " - Trying to find by parent name"
- ))
- ent_path = self.get_ent_path(ftrack_id)
-
- parents = avalon_entity["data"]["parents"]
- parent_name = parents[-1]
- matching_entity_id = None
- for id, entity_dict in self.entities_dict.items():
- if entity_dict["name"] == parent_name:
- matching_entity_id = id
- break
-
- if matching_entity_id is None:
- # TODO logging
- # TODO report (turn off auto-sync?)
- self.log.error((
- "Entity has published context but was moved in"
- " hierarchy and previous parent was not found so it is"
- " not possible to solve this programmatically <{}>"
- ).format(ent_path))
- msg = (
- " Parent of entity can't be"
- " changed due to published context and previous parent"
- " was not found"
- )
- self.report_items["error"][msg].append(ent_path)
- self.filter_with_children(ftrack_id)
- continue
-
- matching_ent_dict = self.entities_dict.get(matching_entity_id)
- match_ent_parents = matching_ent_dict.get(
- "final_entity", {}).get(
- "data", {}).get(
- "parents", ["__NOT_SET__"]
- )
- # TODO logging
- # TODO report
- if (
- len(match_ent_parents) >= len(parents) or
- match_ent_parents[:-1] != parents
- ):
- ent_path = self.get_ent_path(ftrack_id)
- self.log.error((
- "Entity has published context but was moved in"
- " hierarchy and previous parents were moved too it is"
- " not possible to solve this programmatically <{}>"
- ).format(ent_path))
- msg = (
- " Parent of entity can't be"
- " changed due to published context but whole hierarchy"
- " was scrambled"
- )
- continue
-
- old_parent_ent = matching_ent_dict["final_entity"]
-
- parent_id = self.ft_project_id
- entities_to_create = []
- # TODO logging
- self.log.warning(
- "Ftrack entities must be recreated because have"
- " published context but were removed"
- )
-
- _avalon_ent = old_parent_ent
-
- self.updates[avalon_parent_id] = {"type": "asset"}
- success = True
- while True:
- _vis_par = _avalon_ent["data"]["visualParent"]
- _name = _avalon_ent["name"]
- if _name in self.all_ftrack_names:
- av_ent_path_items = _avalon_ent["data"]["parents"]
- av_ent_path_items.append(_name)
- av_ent_path = "/".join(av_ent_path_items)
- # TODO report
- # TODO logging
- self.log.error((
- "Can't recreate entity in Ftrack because entity with"
- " same name already exists in different hierarchy <{}>"
- ).format(av_ent_path))
- msg = (
- " Parent of entity can't be"
- " changed due to published context but previous parent"
- " had name that exist in different hierarchy level"
- )
- self.report_items["error"][msg].append(av_ent_path)
- self.filter_with_children(ftrack_id)
- success = False
- break
-
- entities_to_create.append(_avalon_ent)
- if _vis_par is None:
- break
-
- _vis_par = str(_vis_par)
- _mapped = self.avalon_ftrack_mapper.get(_vis_par)
- if _mapped:
- parent_id = _mapped
- break
-
- _avalon_ent = self.avalon_ents_by_id.get(_vis_par)
- if not _avalon_ent:
- _avalon_ent = self.avalon_archived_by_id.get(_vis_par)
-
- if success is False:
- continue
-
- new_entity_id = None
- for av_entity in reversed(entities_to_create):
- new_entity_id = self.create_ftrack_ent_from_avalon_ent(
- av_entity, parent_id
- )
- update_queue.put(new_entity_id)
-
- if new_entity_id:
- ftrack_ent_dict["entity"]["parent_id"] = new_entity_id
-
- if hierarchy_changing_ids:
- self.reload_parents(hierarchy_changing_ids)
-
- for ftrack_id in self.update_ftrack_ids:
- if ftrack_id == self.ft_project_id:
- continue
-
- avalon_id = self.ftrack_avalon_mapper[ftrack_id]
- avalon_entity = self.avalon_ents_by_id[avalon_id]
-
- avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
- if (
- self.id_cust_attr not in avalon_attrs or
- avalon_attrs[self.id_cust_attr] != avalon_id
- ):
- configuration_id = self.entities_dict[ftrack_id][
- "avalon_attrs_id"][self.id_cust_attr]
-
- _entity_key = collections.OrderedDict({
- "configuration_id": configuration_id,
- "entity_id": ftrack_id
- })
-
- self.session.recorded_operations.push(
- fa_session.ftrack_api.operation.UpdateEntityOperation(
- "ContextCustomAttributeValue",
- _entity_key,
- "value",
- fa_session.ftrack_api.symbol.NOT_SET,
- avalon_id
- )
- )
- # check rest of data
- data_changes = self.compare_dict(
- self.entities_dict[ftrack_id]["final_entity"],
- avalon_entity,
- ignore_keys[ftrack_id]
- )
- if data_changes:
- self.updates[avalon_id] = self.merge_dicts(
- data_changes,
- self.updates[avalon_id]
- )
-
- def synchronize(self):
- self.log.debug("* Synchronization begins")
- avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
- if avalon_project_id:
- self.avalon_project_id = ObjectId(avalon_project_id)
-
- # remove filtered ftrack ids from create/update list
- for ftrack_id in self.all_filtered_entities:
- if ftrack_id in self.create_ftrack_ids:
- self.create_ftrack_ids.remove(ftrack_id)
- elif ftrack_id in self.update_ftrack_ids:
- self.update_ftrack_ids.remove(ftrack_id)
-
- self.log.debug("* Processing entities for archivation")
- self.delete_entities()
-
- self.log.debug("* Processing new entities")
- # Create not created entities
- for ftrack_id in self.create_ftrack_ids:
- # CHECK it is possible that entity was already created
- # because is parent of another entity which was processed first
- if ftrack_id in self.ftrack_avalon_mapper:
- continue
- self.create_avalon_entity(ftrack_id)
-
- if len(self.create_list) > 0:
- self.dbcon.insert_many(self.create_list)
-
- self.session.commit()
-
- self.log.debug("* Processing entities for update")
- self.prepare_changes()
- self.update_entities()
- self.session.commit()
-
- def create_avalon_entity(self, ftrack_id):
- if ftrack_id == self.ft_project_id:
- self.create_avalon_project()
- return
-
- entity_dict = self.entities_dict[ftrack_id]
- parent_ftrack_id = entity_dict["parent_id"]
- avalon_parent = None
- if parent_ftrack_id != self.ft_project_id:
- avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id)
- # if not avalon_parent:
- # self.create_avalon_entity(parent_ftrack_id)
- # avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id]
- avalon_parent = ObjectId(avalon_parent)
-
- # avalon_archived_by_id avalon_archived_by_name
- current_id = (
- entity_dict["avalon_attrs"].get(self.id_cust_attr) or ""
- ).strip()
- mongo_id = current_id
- name = entity_dict["name"]
-
- # Check if exist archived asset in mongo - by ID
- unarchive = False
- unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name)
- if unarchive_id is not None:
- unarchive = True
- mongo_id = unarchive_id
-
- item = entity_dict["final_entity"]
- try:
- new_id = ObjectId(mongo_id)
- if mongo_id in self.avalon_ftrack_mapper:
- new_id = ObjectId()
- except InvalidId:
- new_id = ObjectId()
-
- item["_id"] = new_id
- item["parent"] = self.avalon_project_id
- item["schema"] = self.entity_schemas["asset"]
- item["data"]["visualParent"] = avalon_parent
-
- new_id_str = str(new_id)
- self.ftrack_avalon_mapper[ftrack_id] = new_id_str
- self.avalon_ftrack_mapper[new_id_str] = ftrack_id
-
- self._avalon_ents_by_id[new_id_str] = item
- self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str
- self._avalon_ents_by_name[item["name"]] = new_id_str
-
- if current_id != new_id_str:
- # store mongo id to ftrack entity
- configuration_id = self.entities_dict[ftrack_id][
- "avalon_attrs_id"
- ][self.id_cust_attr]
- _entity_key = collections.OrderedDict({
- "configuration_id": configuration_id,
- "entity_id": ftrack_id
- })
-
- self.session.recorded_operations.push(
- fa_session.ftrack_api.operation.UpdateEntityOperation(
- "ContextCustomAttributeValue",
- _entity_key,
- "value",
- fa_session.ftrack_api.symbol.NOT_SET,
- new_id_str
- )
- )
-
- if unarchive is False:
- self.create_list.append(item)
- return
- # If unarchive then replace entity data in database
- self.dbcon.replace_one({"_id": new_id}, item)
- self.remove_from_archived(mongo_id)
- av_ent_path_items = item["data"]["parents"]
- av_ent_path_items.append(item["name"])
- av_ent_path = "/".join(av_ent_path_items)
- self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
-
- def check_unarchivation(self, ftrack_id, mongo_id, name):
- archived_by_id = self.avalon_archived_by_id.get(mongo_id)
- archived_by_name = self.avalon_archived_by_name.get(name)
-
- # if not found in archived then skip
- if not archived_by_id and not archived_by_name:
- return None
-
- entity_dict = self.entities_dict[ftrack_id]
-
- if archived_by_id:
- # if is changeable then unarchive (nothing to check here)
- if self.changeability_by_mongo_id[mongo_id]:
- return mongo_id
-
- # TODO replace `__NOTSET__` with custom None constant
- archived_parent_id = archived_by_id["data"].get(
- "visualParent", "__NOTSET__"
- )
- archived_parents = archived_by_id["data"].get("parents")
- archived_name = archived_by_id["name"]
-
- if (
- archived_name != entity_dict["name"] or
- archived_parents != entity_dict["final_entity"]["data"][
- "parents"
- ]
- ):
- return None
-
- return mongo_id
-
- # First check if there is any that have same parents
- for archived in archived_by_name:
- mongo_id = str(archived["_id"])
- archived_parents = archived.get("data", {}).get("parents")
- if (
- archived_parents == entity_dict["final_entity"]["data"][
- "parents"
- ]
- ):
- return mongo_id
-
- # Secondly try to find more close to current ftrack entity
- first_changeable = None
- for archived in archived_by_name:
- mongo_id = str(archived["_id"])
- if not self.changeability_by_mongo_id[mongo_id]:
- continue
-
- if first_changeable is None:
- first_changeable = mongo_id
-
- ftrack_parent_id = entity_dict["parent_id"]
- map_ftrack_parent_id = self.ftrack_avalon_mapper.get(
- ftrack_parent_id
- )
-
- # TODO replace `__NOTSET__` with custom None constant
- archived_parent_id = archived.get("data", {}).get(
- "visualParent", "__NOTSET__"
- )
- if archived_parent_id is not None:
- archived_parent_id = str(archived_parent_id)
-
- # skip if parent is archived - How this should be possible?
- parent_entity = self.avalon_ents_by_id.get(archived_parent_id)
- if (
- parent_entity and (
- map_ftrack_parent_id is not None and
- map_ftrack_parent_id == str(parent_entity["_id"])
- )
- ):
- return mongo_id
- # Last return first changeable with same name (or None)
- return first_changeable
-
- def create_avalon_project(self):
- project_item = self.entities_dict[self.ft_project_id]["final_entity"]
- mongo_id = (
- self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
- self.id_cust_attr
- ) or ""
- ).strip()
-
- try:
- new_id = ObjectId(mongo_id)
- except InvalidId:
- new_id = ObjectId()
-
- project_item["_id"] = new_id
- project_item["parent"] = None
- project_item["schema"] = self.entity_schemas["project"]
- project_item["config"]["schema"] = self.entity_schemas["config"]
- project_item["config"]["template"] = self.get_avalon_project_template()
-
- self.ftrack_avalon_mapper[self.ft_project_id] = new_id
- self.avalon_ftrack_mapper[new_id] = self.ft_project_id
-
- self.avalon_project_id = new_id
-
- self._avalon_ents_by_id[str(new_id)] = project_item
- self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
- self._avalon_ents_by_name[project_item["name"]] = str(new_id)
-
- self.create_list.append(project_item)
-
- # store mongo id to ftrack entity
- entity = self.entities_dict[self.ft_project_id]["entity"]
- entity["custom_attributes"][self.id_cust_attr] = str(new_id)
-
- def get_avalon_project_template(self):
- """Get avalon template
- Returns:
- dictionary with templates
- """
- project_name = self.entities_dict[self.ft_project_id]["name"]
- templates = Anatomy(project_name).templates
- return {
- "workfile": templates["avalon"]["workfile"],
- "work": templates["avalon"]["work"],
- "publish": templates["avalon"]["publish"]
- }
-
- def _bubble_changeability(self, unchangeable_ids):
- unchangeable_queue = queue.Queue()
- for entity_id in unchangeable_ids:
- unchangeable_queue.put((entity_id, False))
-
- processed_parents_ids = []
- subsets_to_remove = []
- while not unchangeable_queue.empty():
- entity_id, child_is_archived = unchangeable_queue.get()
- # skip if already processed
- if entity_id in processed_parents_ids:
- continue
-
- entity = self.avalon_ents_by_id.get(entity_id)
- # if entity is not archived but unchageable child was then skip
- # - archived entities should not affect not archived?
- if entity and child_is_archived:
- continue
-
- # set changeability of current entity to False
- self._changeability_by_mongo_id[entity_id] = False
- processed_parents_ids.append(entity_id)
- # if not entity then is probably archived
- if not entity:
- entity = self.avalon_archived_by_id.get(entity_id)
- child_is_archived = True
-
- if not entity:
- # if entity is not found then it is subset without parent
- if entity_id in unchangeable_ids:
- subsets_to_remove.append(entity_id)
- else:
- # TODO logging - What is happening here?
- self.log.warning((
- "In avalon are entities without valid parents that"
- " lead to Project (should not cause errors)"
- " - MongoId <{}>"
- ).format(str(entity_id)))
- continue
-
- # skip if parent is project
- parent_id = entity["data"]["visualParent"]
- if parent_id is None:
- continue
- unchangeable_queue.put((str(parent_id), child_is_archived))
-
- self._delete_subsets_without_asset(subsets_to_remove)
-
- def _delete_subsets_without_asset(self, not_existing_parents):
- subset_ids = []
- version_ids = []
- repre_ids = []
- to_delete = []
-
- for parent_id in not_existing_parents:
- subsets = self.subsets_by_parent_id.get(parent_id)
- if not subsets:
- continue
- for subset in subsets:
- if subset.get("type") != "subset":
- continue
- subset_ids.append(subset["_id"])
-
- db_subsets = self.dbcon.find({
- "_id": {"$in": subset_ids},
- "type": "subset"
- })
- if not db_subsets:
- return
-
- db_versions = self.dbcon.find({
- "parent": {"$in": subset_ids},
- "type": "version"
- })
- if db_versions:
- version_ids = [ver["_id"] for ver in db_versions]
-
- db_repres = self.dbcon.find({
- "parent": {"$in": version_ids},
- "type": "representation"
- })
- if db_repres:
- repre_ids = [repre["_id"] for repre in db_repres]
-
- to_delete.extend(subset_ids)
- to_delete.extend(version_ids)
- to_delete.extend(repre_ids)
-
- self.dbcon.delete_many({"_id": {"$in": to_delete}})
-
- # Probably deprecated
- def _check_changeability(self, parent_id=None):
- for entity in self.avalon_ents_by_parent_id[parent_id]:
- mongo_id = str(entity["_id"])
- is_changeable = self._changeability_by_mongo_id.get(mongo_id)
- if is_changeable is not None:
- continue
-
- self._check_changeability(mongo_id)
- is_changeable = True
- for child in self.avalon_ents_by_parent_id[parent_id]:
- if not self._changeability_by_mongo_id[str(child["_id"])]:
- is_changeable = False
- break
-
- if is_changeable is True:
- is_changeable = (mongo_id in self.subsets_by_parent_id)
- self._changeability_by_mongo_id[mongo_id] = is_changeable
-
- def update_entities(self):
- mongo_changes_bulk = []
- for mongo_id, changes in self.updates.items():
- filter = {"_id": ObjectId(mongo_id)}
- change_data = self.from_dict_to_set(changes)
- mongo_changes_bulk.append(UpdateOne(filter, change_data))
-
- if not mongo_changes_bulk:
- # TODO LOG
- return
- self.dbcon.bulk_write(mongo_changes_bulk)
-
- def from_dict_to_set(self, data):
- result = {"$set": {}}
- dict_queue = queue.Queue()
- dict_queue.put((None, data))
-
- while not dict_queue.empty():
- _key, _data = dict_queue.get()
- for key, value in _data.items():
- new_key = key
- if _key is not None:
- new_key = "{}.{}".format(_key, key)
-
- if not isinstance(value, dict):
- result["$set"][new_key] = value
- continue
- dict_queue.put((new_key, value))
- return result
-
- def reload_parents(self, hierarchy_changing_ids):
- parents_queue = queue.Queue()
- parents_queue.put((self.ft_project_id, [], False))
- while not parents_queue.empty():
- ftrack_id, parent_parents, changed = parents_queue.get()
- _parents = parent_parents.copy()
- if ftrack_id not in hierarchy_changing_ids and not changed:
- if ftrack_id != self.ft_project_id:
- _parents.append(self.entities_dict[ftrack_id]["name"])
- for child_id in self.entities_dict[ftrack_id]["children"]:
- parents_queue.put((child_id, _parents, changed))
- continue
-
- changed = True
- parents = [par for par in _parents]
- hierarchy = "/".join(parents)
- self.entities_dict[ftrack_id][
- "final_entity"]["data"]["parents"] = parents
- self.entities_dict[ftrack_id][
- "final_entity"]["data"]["hierarchy"] = hierarchy
-
- _parents.append(self.entities_dict[ftrack_id]["name"])
- for child_id in self.entities_dict[ftrack_id]["children"]:
- parents_queue.put((child_id, _parents, changed))
-
- if ftrack_id in self.create_ftrack_ids:
- mongo_id = self.ftrack_avalon_mapper[ftrack_id]
- if "data" not in self.updates[mongo_id]:
- self.updates[mongo_id]["data"] = {}
- self.updates[mongo_id]["data"]["parents"] = parents
- self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
-
- def prepare_project_changes(self):
- ftrack_ent_dict = self.entities_dict[self.ft_project_id]
- ftrack_entity = ftrack_ent_dict["entity"]
- avalon_code = self.avalon_project["data"]["code"]
- # TODO Is possible to sync if full name was changed?
- # if ftrack_ent_dict["name"] != self.avalon_project["name"]:
- # ftrack_entity["full_name"] = avalon_name
- # self.entities_dict[self.ft_project_id]["name"] = avalon_name
- # self.entities_dict[self.ft_project_id]["final_entity"][
- # "name"
- # ] = avalon_name
-
- # TODO logging
- # TODO report
- # TODO May this happen? Is possible to change project code?
- if ftrack_entity["name"] != avalon_code:
- ftrack_entity["name"] = avalon_code
- self.entities_dict[self.ft_project_id]["final_entity"]["data"][
- "code"
- ] = avalon_code
- self.session.commit()
- sub_msg = (
- "Project code was changed back to \"{}\"".format(avalon_code)
- )
- msg = (
- "It is not allowed to change"
- " project code after synchronization"
- )
- self.report_items["warning"][msg] = sub_msg
- self.log.warning(sub_msg)
-
- return self.compare_dict(
- self.entities_dict[self.ft_project_id]["final_entity"],
- self.avalon_project
- )
-
- def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
- # _ignore_keys may be used for keys nested dict like"data.visualParent"
- changes = {}
- ignore_keys = []
- for key_val in _ignore_keys:
- key_items = key_val.split(".")
- if len(key_items) == 1:
- ignore_keys.append(key_items[0])
-
- for key, value in dict_new.items():
- if key in ignore_keys:
- continue
-
- if key not in dict_old:
- changes[key] = value
- continue
-
- if isinstance(value, dict):
- if not isinstance(dict_old[key], dict):
- changes[key] = value
- continue
-
- _new_ignore_keys = []
- for key_val in _ignore_keys:
- key_items = key_val.split(".")
- if len(key_items) <= 1:
- continue
- _new_ignore_keys.append(".".join(key_items[1:]))
-
- _changes = self.compare_dict(
- value, dict_old[key], _new_ignore_keys
- )
- if _changes:
- changes[key] = _changes
- continue
-
- if value != dict_old[key]:
- changes[key] = value
-
- return changes
-
- def merge_dicts(self, dict_new, dict_old):
- # _ignore_keys may be used for keys nested dict like"data.visualParent"
- for key, value in dict_new.items():
- if key not in dict_old:
- dict_old[key] = value
- continue
-
- if isinstance(value, dict):
- dict_old[key] = self.merge_dicts(value, dict_old[key])
- continue
-
- dict_old[key] = value
-
- return dict_old
-
- def delete_entities(self):
- if not self.deleted_entities:
- return
- # Try to order so child is not processed before parent
- deleted_entities = []
- _deleted_entities = [id for id in self.deleted_entities]
-
- while True:
- if not _deleted_entities:
- break
- _ready = []
- for mongo_id in _deleted_entities:
- ent = self.avalon_ents_by_id[mongo_id]
- vis_par = ent["data"]["visualParent"]
- if (
- vis_par is not None and
- str(vis_par) in self.deleted_entities
- ):
- continue
- _ready.append(mongo_id)
-
- for id in _ready:
- deleted_entities.append(id)
- _deleted_entities.remove(id)
-
- delete_ids = []
- for mongo_id in deleted_entities:
- # delete if they are deletable
- if self.changeability_by_mongo_id[mongo_id]:
- delete_ids.append(ObjectId(mongo_id))
- continue
-
- # check if any new created entity match same entity
- # - name and parents must match
- deleted_entity = self.avalon_ents_by_id[mongo_id]
- name = deleted_entity["name"]
- parents = deleted_entity["data"]["parents"]
- similar_ent_id = None
- for ftrack_id in self.create_ftrack_ids:
- _ent_final = self.entities_dict[ftrack_id]["final_entity"]
- if _ent_final["name"] != name:
- continue
- if _ent_final["data"]["parents"] != parents:
- continue
-
- # If in create is "same" then we can "archive" current
- # since will be unarchived in create method
- similar_ent_id = ftrack_id
- break
-
- # If similar entity(same name and parents) is in create
- # entities list then just change from create to update
- if similar_ent_id is not None:
- self.create_ftrack_ids.remove(similar_ent_id)
- self.update_ftrack_ids.append(similar_ent_id)
- self.avalon_ftrack_mapper[mongo_id] = similar_ent_id
- self.ftrack_avalon_mapper[similar_ent_id] = mongo_id
- continue
-
- found_by_name_id = None
- for ftrack_id, ent_dict in self.entities_dict.items():
- if not ent_dict.get("name"):
- continue
-
- if name == ent_dict["name"]:
- found_by_name_id = ftrack_id
- break
-
- if found_by_name_id is not None:
- # * THESE conditins are too complex to implement in first stage
- # - probably not possible to solve if this happen
- # if found_by_name_id in self.create_ftrack_ids:
- # # reparent entity of the new one create?
- # pass
- #
- # elif found_by_name_id in self.update_ftrack_ids:
- # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id]
- #
- # ent_dict = self.entities_dict[found_by_name_id]
-
- # TODO report - CRITICAL entity with same name alread exists in
- # different hierarchy - can't recreate entity
- continue
-
- _vis_parent = str(deleted_entity["data"]["visualParent"])
- if _vis_parent is None:
- _vis_parent = self.avalon_project_id
- ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent]
- self.create_ftrack_ent_from_avalon_ent(
- deleted_entity, ftrack_parent_id
- )
-
- filter = {"_id": {"$in": delete_ids}, "type": "asset"}
- self.dbcon.update_many(filter, {"$set": {"type": "archived_asset"}})
-
- def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id):
- new_entity = None
- parent_entity = self.entities_dict[parent_id]["entity"]
-
- _name = av_entity["name"]
- _type = av_entity["data"].get("entityType", "folder")
-
- self.log.debug((
- "Re-ceating deleted entity {} <{}>"
- ).format(_name, _type))
-
- new_entity = self.session.create(_type, {
- "name": _name,
- "parent": parent_entity
- })
-
- final_entity = {}
- for k, v in av_entity.items():
- final_entity[k] = v
-
- if final_entity.get("type") != "asset":
- final_entity["type"] = "asset"
-
- new_entity_id = new_entity["id"]
- new_entity_data = {
- "entity": new_entity,
- "parent_id": parent_id,
- "entity_type": _type.lower(),
- "entity_type_orig": _type,
- "name": _name,
- "final_entity": final_entity
- }
- for k, v in new_entity_data.items():
- self.entities_dict[new_entity_id][k] = v
-
- p_chilren = self.entities_dict[parent_id]["children"]
- if new_entity_id not in p_chilren:
- self.entities_dict[parent_id]["children"].append(new_entity_id)
-
- cust_attr, hier_attrs = self.get_avalon_attr()
- for _attr in cust_attr:
- key = _attr["key"]
- if key not in av_entity["data"]:
- continue
-
- if key not in new_entity["custom_attributes"]:
- continue
-
- value = av_entity["data"][key]
- if not value:
- continue
-
- new_entity["custom_attributes"][key] = value
-
- av_entity_id = str(av_entity["_id"])
- new_entity["custom_attributes"][self.id_cust_attr] = av_entity_id
-
- self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
- self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
-
- self.session.commit()
-
- ent_path = self.get_ent_path(new_entity_id)
- msg = (
- "Deleted entity was recreated because had (or his children)"
- " published context"
- )
-
- self.report_items["info"][msg].append(ent_path)
-
- return new_entity_id
-
- def regex_duplicate_interface(self):
- items = []
- if self.failed_regex or self.tasks_failed_regex:
- subtitle = "Not allowed symbols in entity names:"
- items.append({
- "type": "label",
- "value": "# {}".format(subtitle)
- })
- items.append({
- "type": "label",
- "value": (
- "NOTE: Allowed symbols are Letters( a-Z ),"
- " Numbers( 0-9 ) and Underscore( _ )
"
- )
- })
- log_msgs = []
- for name, ids in self.failed_regex.items():
- error_title = {
- "type": "label",
- "value": "## {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}
'.format("
".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
-
- for name, ids in self.tasks_failed_regex.items():
- error_title = {
- "type": "label",
- "value": "## Task: {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- ent_path = "/".join([ent_path, name])
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}
'.format("
".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
-
- self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
-
- if self.duplicates:
- subtitle = "Duplicated entity names:"
- items.append({
- "type": "label",
- "value": "# {}".format(subtitle)
- })
- items.append({
- "type": "label",
- "value": (
- "NOTE: It is not allowed to have same name"
- " for multiple entities in one project
"
- )
- })
- log_msgs = []
- for name, ids in self.duplicates.items():
- error_title = {
- "type": "label",
- "value": "## {}".format(name)
- }
- items.append(error_title)
- paths = []
- for entity_id in ids:
- ent_path = self.get_ent_path(entity_id)
- paths.append(ent_path)
-
- error_message = {
- "type": "label",
- "value": '{}
'.format("
".join(paths))
- }
- items.append(error_message)
- log_msgs.append("<{}> ({})".format(name, ", ".join(paths)))
-
- self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
-
- return items
-
- def get_avalon_attr(self, split_hierarchical=True):
- custom_attributes = []
- hier_custom_attributes = []
- cust_attrs_query = (
- "select id, entity_type, object_type_id, is_hierarchical, default"
- " from CustomAttributeConfiguration"
- " where group.name = \"avalon\""
- )
- all_avalon_attr = self.session.query(cust_attrs_query).all()
- for cust_attr in all_avalon_attr:
- if split_hierarchical and cust_attr["is_hierarchical"]:
- hier_custom_attributes.append(cust_attr)
- continue
-
- custom_attributes.append(cust_attr)
-
- if split_hierarchical:
- # return tuple
- return custom_attributes, hier_custom_attributes
-
- return custom_attributes
-
- def report(self):
- items = []
- project_name = self.entities_dict[self.ft_project_id]["name"]
- title = "Synchronization report ({}):".format(project_name)
-
- keys = ["error", "warning", "info"]
- for key in keys:
- subitems = []
- if key == "warning":
- for _item in self.regex_duplicate_interface():
- subitems.append(_item)
-
- for msg, _items in self.report_items[key].items():
- if not _items:
- continue
-
- subitems.append({
- "type": "label",
- "value": "# {}".format(msg)
- })
- if isinstance(_items, str):
- _items = [_items]
- subitems.append({
- "type": "label",
- "value": '{}
'.format("
".join(_items))
- })
-
- if items and subitems:
- items.append(self.report_splitter)
-
- items.extend(subitems)
-
- return {
- "items": items,
- "title": title,
- "success": False,
- "message": "Synchronization Finished"
- }
+from pype.ftrack.lib.avalon_sync import SyncEntitiesFactory
+from pypeapp import config
class SyncToAvalonServer(BaseAction):
@@ -2179,13 +44,27 @@ class SyncToAvalonServer(BaseAction):
"PYPE_STATICS_SERVER",
"http://localhost:{}".format(
config.get_presets().get("services", {}).get(
- "statics_server", {}
+ "rest_api", {}
).get("default_port", 8021)
)
)
)
- #: roles that are allowed to register this action
- role_list = ["Pypeclub"]
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.entities_factory = SyncEntitiesFactory(self.log, self.session)
+
+ def register(self):
+ self.session.event_hub.subscribe(
+ "topic=ftrack.action.discover",
+ self._discover,
+ priority=self.priority
+ )
+
+ launch_subscription = (
+ "topic=ftrack.action.launch and data.actionIdentifier={0}"
+ ).format(self.identifier)
+ self.session.event_hub.subscribe(launch_subscription, self._launch)
def discover(self, session, entities, event):
""" Validation """
@@ -2213,8 +92,6 @@ class SyncToAvalonServer(BaseAction):
for role in user["user_security_roles"]:
if role["security_role"]["name"] in role_list:
return True
- break
-
return False
def launch(self, session, in_entities, event):
@@ -2228,28 +105,26 @@ class SyncToAvalonServer(BaseAction):
ft_project_name = in_entities[0]["project"]["full_name"]
try:
- entities_factory = SyncEntitiesFactory(
- self.log, session, ft_project_name
- )
+ self.entities_factory.launch_setup(ft_project_name)
time_1 = time.time()
- entities_factory.set_cutom_attributes()
+ self.entities_factory.set_cutom_attributes()
time_2 = time.time()
# This must happen before all filtering!!!
- entities_factory.prepare_avalon_entities(ft_project_name)
+ self.entities_factory.prepare_avalon_entities(ft_project_name)
time_3 = time.time()
- entities_factory.filter_by_ignore_sync()
+ self.entities_factory.filter_by_ignore_sync()
time_4 = time.time()
- entities_factory.duplicity_regex_check()
+ self.entities_factory.duplicity_regex_check()
time_5 = time.time()
- entities_factory.prepare_ftrack_ent_data()
+ self.entities_factory.prepare_ftrack_ent_data()
time_6 = time.time()
- entities_factory.synchronize()
+ self.entities_factory.synchronize()
time_7 = time.time()
self.log.debug(
@@ -2280,7 +155,7 @@ class SyncToAvalonServer(BaseAction):
"* Total time: {}".format(time_7 - time_start)
)
- report = entities_factory.report()
+ report = self.entities_factory.report()
if report and report.get("items"):
default_title = "Synchronization report ({}):".format(
ft_project_name
@@ -2322,13 +197,13 @@ class SyncToAvalonServer(BaseAction):
report = {"items": []}
try:
- report = entities_factory.report()
+ report = self.entities_factory.report()
except Exception:
pass
_items = report.get("items", [])
if _items:
- items.append(entities_factory.report_splitter)
+ items.append(self.entities_factory.report_splitter)
items.extend(_items)
self.show_interface(items, title, event)
@@ -2337,16 +212,16 @@ class SyncToAvalonServer(BaseAction):
finally:
try:
- entities_factory.dbcon.uninstall()
+ self.entities_factory.dbcon.uninstall()
except Exception:
pass
try:
- entities_factory.session.close()
+ self.entities_factory.session.close()
except Exception:
pass
+
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
-
SyncToAvalonServer(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_del_avalon_id_from_new.py b/pype/ftrack/events/event_del_avalon_id_from_new.py
index 3436fde252..d820e40467 100644
--- a/pype/ftrack/events/event_del_avalon_id_from_new.py
+++ b/pype/ftrack/events/event_del_avalon_id_from_new.py
@@ -1,6 +1,6 @@
-import ftrack_api
-from pype.ftrack import BaseEvent, get_ca_mongoid
-from pype.ftrack.events.event_sync_to_avalon import SyncToAvalon
+from pype.ftrack.lib import BaseEvent
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent
class DelAvalonIdFromNew(BaseEvent):
@@ -11,7 +11,8 @@ class DelAvalonIdFromNew(BaseEvent):
Priority of this event must be less than SyncToAvalon event
'''
- priority = SyncToAvalon.priority - 1
+ priority = SyncToAvalonEvent.priority - 1
+ ignore_me = True
def launch(self, session, event):
created = []
@@ -28,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent):
elif (
entity.get('action', None) == 'update' and
- get_ca_mongoid() in entity['keys'] and
+ CustAttrIdKey in entity['keys'] and
entity_id in created
):
ftrack_entity = session.get(
@@ -37,13 +38,11 @@ class DelAvalonIdFromNew(BaseEvent):
)
cust_attr = ftrack_entity['custom_attributes'][
- get_ca_mongoid()
+ CustAttrIdKey
]
if cust_attr != '':
- ftrack_entity['custom_attributes'][
- get_ca_mongoid()
- ] = ''
+ ftrack_entity['custom_attributes'][CustAttrIdKey] = ''
session.commit()
except Exception:
@@ -53,5 +52,4 @@ class DelAvalonIdFromNew(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
-
DelAvalonIdFromNew(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_sync_hier_attr.py b/pype/ftrack/events/event_sync_hier_attr.py
deleted file mode 100644
index 682575b52c..0000000000
--- a/pype/ftrack/events/event_sync_hier_attr.py
+++ /dev/null
@@ -1,213 +0,0 @@
-import os
-import sys
-
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-
-import ftrack_api
-from pype.ftrack import BaseEvent, lib
-from bson.objectid import ObjectId
-
-
-class SyncHierarchicalAttrs(BaseEvent):
- # After sync to avalon event!
- priority = 101
- db_con = DbConnector()
- ca_mongoid = lib.get_ca_mongoid()
-
- def launch(self, session, event):
- # Filter entities and changed values if it makes sence to run script
- processable = []
- processable_ent = {}
- for ent in event['data']['entities']:
- # Ignore entities that are not tasks or projects
- if ent['entityType'].lower() not in ['task', 'show']:
- continue
-
- action = ent.get("action")
- # skip if remove (Entity does not exist in Ftrack)
- if action == "remove":
- continue
-
- # When entity was add we don't care about keys
- if action != "add":
- keys = ent.get('keys')
- if not keys:
- continue
-
- entity = session.get(self._get_entity_type(ent), ent['entityId'])
- processable.append(ent)
-
- processable_ent[ent['entityId']] = {
- "entity": entity,
- "action": action,
- "link": entity["link"]
- }
-
- if not processable:
- return True
-
- # Find project of entities
- ft_project = None
- for entity_dict in processable_ent.values():
- try:
- base_proj = entity_dict['link'][0]
- except Exception:
- continue
- ft_project = session.get(base_proj['type'], base_proj['id'])
- break
-
- # check if project is set to auto-sync
- if (
- ft_project is None or
- 'avalon_auto_sync' not in ft_project['custom_attributes'] or
- ft_project['custom_attributes']['avalon_auto_sync'] is False
- ):
- return True
-
- # Get hierarchical custom attributes from "avalon" group
- custom_attributes = {}
- query = 'CustomAttributeGroup where name is "avalon"'
- all_avalon_attr = session.query(query).one()
- for cust_attr in all_avalon_attr['custom_attribute_configurations']:
- if 'avalon_' in cust_attr['key']:
- continue
- if not cust_attr['is_hierarchical']:
- continue
- custom_attributes[cust_attr['key']] = cust_attr
-
- if not custom_attributes:
- return True
-
- self.db_con.install()
- self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name']
-
- for ent in processable:
- entity_dict = processable_ent[ent['entityId']]
-
- entity = entity_dict["entity"]
- ent_path = "/".join([ent["name"] for ent in entity_dict['link']])
- action = entity_dict["action"]
-
- keys_to_process = {}
- if action == "add":
- # Store all custom attributes when entity was added
- for key in custom_attributes:
- keys_to_process[key] = entity['custom_attributes'][key]
- else:
- # Update only updated keys
- for key in ent['keys']:
- if key in custom_attributes:
- keys_to_process[key] = entity['custom_attributes'][key]
-
- processed_keys = self.get_hierarchical_values(
- keys_to_process, entity
- )
- # Do the processing of values
- self.update_hierarchical_attribute(entity, processed_keys, ent_path)
-
- self.db_con.uninstall()
-
- return True
-
- def get_hierarchical_values(self, keys_dict, entity):
- # check already set values
- _set_keys = []
- for key, value in keys_dict.items():
- if value is not None:
- _set_keys.append(key)
-
- # pop set values from keys_dict
- set_keys = {}
- for key in _set_keys:
- set_keys[key] = keys_dict.pop(key)
-
- # find if entity has set values and pop them out
- keys_to_pop = []
- for key in keys_dict.keys():
- _val = entity["custom_attributes"][key]
- if _val:
- keys_to_pop.append(key)
- set_keys[key] = _val
-
- for key in keys_to_pop:
- keys_dict.pop(key)
-
- # if there are not keys to find value return found
- if not keys_dict:
- return set_keys
-
- # end recursion if entity is project
- if entity.entity_type.lower() == "project":
- for key, value in keys_dict.items():
- set_keys[key] = value
-
- else:
- result = self.get_hierarchical_values(keys_dict, entity["parent"])
- for key, value in result.items():
- set_keys[key] = value
-
- return set_keys
-
- def update_hierarchical_attribute(self, entity, keys_dict, ent_path):
- # TODO store all keys at once for entity
- custom_attributes = entity.get('custom_attributes')
- if not custom_attributes:
- return
-
- mongoid = custom_attributes.get(self.ca_mongoid)
- if not mongoid:
- return
-
- try:
- mongoid = ObjectId(mongoid)
- except Exception:
- return
-
- mongo_entity = self.db_con.find_one({'_id': mongoid})
- if not mongo_entity:
- return
-
- changed_keys = {}
- data = mongo_entity.get('data') or {}
- for key, value in keys_dict.items():
- cur_value = data.get(key)
- if cur_value:
- if cur_value == value:
- continue
- changed_keys[key] = value
- data[key] = value
-
- if not changed_keys:
- return
-
- self.log.debug(
- "{} - updated hierarchical attributes: {}".format(
- ent_path, str(changed_keys)
- )
- )
-
- self.db_con.update_many(
- {'_id': mongoid},
- {'$set': {'data': data}}
- )
-
- for child in entity.get('children', []):
- _keys_dict = {}
- for key, value in keys_dict.items():
- if key not in child.get('custom_attributes', {}):
- continue
- child_value = child['custom_attributes'][key]
- if child_value is not None:
- continue
- _keys_dict[key] = value
-
- if not _keys_dict:
- continue
- child_path = "/".join([ent["name"] for ent in child['link']])
- self.update_hierarchical_attribute(child, _keys_dict, child_path)
-
-
-def register(session, plugins_presets):
- '''Register plugin. Called when used as an plugin.'''
-
- SyncHierarchicalAttrs(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index db8ca845a6..606866aba2 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -1,51 +1,526 @@
+import os
+import collections
+import copy
+import queue
+import time
+import atexit
+import traceback
+
+from bson.objectid import ObjectId
+from pymongo import UpdateOne
+
+from avalon import schema
+
+from pype.ftrack.lib import avalon_sync
+from pype.ftrack.lib.avalon_sync import (
+ CustAttrIdKey, CustAttrAutoSync, EntitySchemas
+)
import ftrack_api
-from pype.ftrack import BaseEvent, lib
+from pype.ftrack import BaseEvent
+
+from pype.ftrack.lib.io_nonsingleton import DbConnector
-class SyncToAvalon(BaseEvent):
+class SyncToAvalonEvent(BaseEvent):
- priority = 100
+ dbcon = DbConnector()
- ignore_entityType = [
- 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
- 'socialfeed', 'socialnotification', 'timelog'
+ ignore_entTypes = [
+ "socialfeed", "socialnotification", "note",
+ "assetversion", "job", "user", "reviewsessionobject", "timer",
+ "timelog", "auth_userrole"
]
+ ignore_ent_types = ["Milestone"]
+ ignore_keys = ["statusid"]
+
+ project_query = (
+ "select full_name, name, custom_attributes"
+ ", project_schema._task_type_schema.types.name"
+ " from Project where id is \"{}\""
+ )
+
+ entities_query_by_id = (
+ "select id, name, parent_id, link, custom_attributes from TypedContext"
+ " where project_id is \"{}\" and id in ({})"
+ )
+ entities_name_query_by_name = (
+ "select id, name from TypedContext"
+ " where project_id is \"{}\" and name in ({})"
+ )
+ created_entities = []
+
+ def __init__(self, session, plugins_presets={}):
+ '''Expects a ftrack_api.Session instance'''
+ self.set_process_session(session)
+ super().__init__(session, plugins_presets)
+
+ @property
+ def cur_project(self):
+ if self._cur_project is None:
+ found_id = None
+ for ent_info in self._cur_event["data"]["entities"]:
+ if found_id is not None:
+ break
+ parents = ent_info.get("parents") or []
+ for parent in parents:
+ if parent.get("entityType") == "show":
+ found_id = parent.get("entityId")
+ break
+ if found_id:
+ self._cur_project = self.process_session.query(
+ self.project_query.format(found_id)
+ ).one()
+ return self._cur_project
+
+ @property
+ def avalon_cust_attrs(self):
+ if self._avalon_cust_attrs is None:
+ self._avalon_cust_attrs = avalon_sync.get_avalon_attr(
+ self.process_session
+ )
+ return self._avalon_cust_attrs
+
+ @property
+ def avalon_entities(self):
+ if self._avalon_ents is None:
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = (
+ self.cur_project["full_name"]
+ )
+ avalon_project = self.dbcon.find_one({"type": "project"})
+ avalon_entities = list(self.dbcon.find({"type": "asset"}))
+ self._avalon_ents = (avalon_project, avalon_entities)
+ return self._avalon_ents
+
+ @property
+ def avalon_ents_by_name(self):
+ if self._avalon_ents_by_name is None:
+ self._avalon_ents_by_name = {}
+ proj, ents = self.avalon_entities
+ for ent in ents:
+ self._avalon_ents_by_name[ent["name"]] = ent
+ return self._avalon_ents_by_name
+
+ @property
+ def avalon_ents_by_id(self):
+ if self._avalon_ents_by_id is None:
+ self._avalon_ents_by_id = {}
+ proj, ents = self.avalon_entities
+ self._avalon_ents_by_id[proj["_id"]] = proj
+ for ent in ents:
+ self._avalon_ents_by_id[ent["_id"]] = ent
+ return self._avalon_ents_by_id
+
+ @property
+ def avalon_ents_by_parent_id(self):
+ if self._avalon_ents_by_parent_id is None:
+ self._avalon_ents_by_parent_id = collections.defaultdict(list)
+ proj, ents = self.avalon_entities
+ for ent in ents:
+ vis_par = ent["data"]["visualParent"]
+ if vis_par is None:
+ vis_par = proj["_id"]
+ self._avalon_ents_by_parent_id[vis_par].append(ent)
+ return self._avalon_ents_by_parent_id
+
+ @property
+ def avalon_ents_by_ftrack_id(self):
+ if self._avalon_ents_by_ftrack_id is None:
+ self._avalon_ents_by_ftrack_id = {}
+ proj, ents = self.avalon_entities
+ ftrack_id = proj["data"]["ftrackId"]
+ self._avalon_ents_by_ftrack_id[ftrack_id] = proj
+ for ent in ents:
+ ftrack_id = ent["data"]["ftrackId"]
+ self._avalon_ents_by_ftrack_id[ftrack_id] = ent
+ return self._avalon_ents_by_ftrack_id
+
+ @property
+ def avalon_subsets_by_parents(self):
+ if self._avalon_subsets_by_parents is None:
+ self._avalon_subsets_by_parents = collections.defaultdict(list)
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = (
+ self.cur_project["full_name"]
+ )
+ for subset in self.dbcon.find({"type": "subset"}):
+ self._avalon_subsets_by_parents[subset["parent"]].append(
+ subset
+ )
+ return self._avalon_subsets_by_parents
+
+ @property
+ def avalon_archived_by_id(self):
+ if self._avalon_archived_by_id is None:
+ self._avalon_archived_by_id = {}
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = (
+ self.cur_project["full_name"]
+ )
+ for asset in self.dbcon.find({"type": "archived_asset"}):
+ self._avalon_archived_by_id[asset["_id"]] = asset
+ return self._avalon_archived_by_id
+
+ @property
+ def avalon_archived_by_name(self):
+ if self._avalon_archived_by_name is None:
+ self._avalon_archived_by_name = {}
+ for asset in self.avalon_archived_by_id.values():
+ self._avalon_archived_by_name[asset["name"]] = asset
+ return self._avalon_archived_by_name
+
+ @property
+ def changeability_by_mongo_id(self):
+ """Return info about changeability of entity and it's parents."""
+ if self._changeability_by_mongo_id is None:
+ self._changeability_by_mongo_id = collections.defaultdict(
+ lambda: True
+ )
+ avalon_project, avalon_entities = self.avalon_entities
+ self._changeability_by_mongo_id[avalon_project["_id"]] = False
+ self._bubble_changeability(
+ list(self.avalon_subsets_by_parents.keys())
+ )
+
+ return self._changeability_by_mongo_id
+
+ @property
+ def avalon_custom_attributes(self):
+ """Return info about changeability of entity and it's parents."""
+ if self._avalon_custom_attributes is None:
+ self._avalon_custom_attributes = avalon_sync.get_avalon_attr(
+ self.process_session
+ )
+ return self._avalon_custom_attributes
+
+ def remove_cached_by_key(self, key, values):
+ if self._avalon_ents is None:
+ return
+
+ if not isinstance(values, (list, tuple)):
+ values = [values]
+
+ def get_found_data(entity):
+ if not entity:
+ return None
+ return {
+ "ftrack_id": entity["data"]["ftrackId"],
+ "parent_id": entity["data"]["visualParent"],
+ "_id": entity["_id"],
+ "name": entity["name"],
+ "entity": entity
+ }
+
+ if key == "id":
+ key = "_id"
+ elif key == "ftrack_id":
+ key = "data.ftrackId"
+
+ found_data = {}
+ project, entities = self._avalon_ents
+ key_items = key.split(".")
+ for value in values:
+ ent = None
+ if key == "_id":
+ if self._avalon_ents_by_id is not None:
+ ent = self._avalon_ents_by_id.get(value)
+
+ elif key == "name":
+ if self._avalon_ents_by_name is not None:
+ ent = self._avalon_ents_by_name.get(value)
+
+ elif key == "data.ftrackId":
+ if self._avalon_ents_by_ftrack_id is not None:
+ ent = self._avalon_ents_by_ftrack_id.get(value)
+
+ if ent is None:
+ for _ent in entities:
+ _temp = _ent
+ for item in key_items:
+ _temp = _temp[item]
+
+ if _temp == value:
+ ent = _ent
+ break
+
+ found_data[value] = get_found_data(ent)
+
+ for value in values:
+ data = found_data[value]
+ if not data:
+ # TODO logging
+ self.log.warning(
+ "Didn't found entity by key/value \"{}\" / \"{}\"".format(
+ key, value
+ )
+ )
+ continue
+
+ ftrack_id = data["ftrack_id"]
+ parent_id = data["parent_id"]
+ mongo_id = data["_id"]
+ name = data["name"]
+ entity = data["entity"]
+
+ project, ents = self._avalon_ents
+ ents.remove(entity)
+ self._avalon_ents = project, ents
+
+ if self._avalon_ents_by_ftrack_id is not None:
+ self._avalon_ents_by_ftrack_id.pop(ftrack_id, None)
+
+ if self._avalon_ents_by_parent_id is not None:
+ self._avalon_ents_by_parent_id[parent_id].remove(entity)
+
+ if self._avalon_ents_by_id is not None:
+ self._avalon_ents_by_id.pop(mongo_id, None)
+
+ if self._avalon_ents_by_name is not None:
+ self._avalon_ents_by_name.pop(name, None)
+
+ if self._avalon_archived_by_id is not None:
+ self._avalon_archived_by_id[mongo_id] = entity
+
+ if mongo_id in self.task_changes_by_avalon_id:
+ self.task_changes_by_avalon_id.pop(mongo_id)
+
+ def _bubble_changeability(self, unchangeable_ids):
+ unchangeable_queue = queue.Queue()
+ for entity_id in unchangeable_ids:
+ unchangeable_queue.put((entity_id, False))
+
+ processed_parents_ids = []
+ while not unchangeable_queue.empty():
+ entity_id, child_is_archived = unchangeable_queue.get()
+ # skip if already processed
+ if entity_id in processed_parents_ids:
+ continue
+
+ entity = self.avalon_ents_by_id.get(entity_id)
+            # if entity is not archived but unchangeable child was then skip
+ # - archived entities should not affect not archived?
+ if entity and child_is_archived:
+ continue
+
+ # set changeability of current entity to False
+ self._changeability_by_mongo_id[entity_id] = False
+ processed_parents_ids.append(entity_id)
+ # if not entity then is probably archived
+ if not entity:
+ entity = self.avalon_archived_by_id.get(entity_id)
+ child_is_archived = True
+
+ if not entity:
+ # if entity is not found then it is subset without parent
+ if entity_id in unchangeable_ids:
+ _subset_ids = [
+ str(sub["_id"]) for sub in
+ self.avalon_subsets_by_parents[entity_id]
+ ]
+ joined_subset_ids = "| ".join(_subset_ids)
+ self.log.warning((
+ "Parent <{}> for subsets <{}> does not exist"
+ ).format(str(entity_id), joined_subset_ids))
+ else:
+ self.log.warning((
+ "In avalon are entities without valid parents that"
+ " lead to Project (should not cause errors)"
+ " - MongoId <{}>"
+ ).format(str(entity_id)))
+ continue
+
+ # skip if parent is project
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is None:
+ continue
+ unchangeable_queue.put((parent_id, child_is_archived))
+
+ def reset_variables(self):
+ """Reset variables so each event callback has clear env."""
+ self._cur_project = None
+
+ self._avalon_cust_attrs = None
+
+ self._avalon_ents = None
+ self._avalon_ents_by_id = None
+ self._avalon_ents_by_parent_id = None
+ self._avalon_ents_by_ftrack_id = None
+ self._avalon_ents_by_name = None
+ self._avalon_subsets_by_parents = None
+ self._changeability_by_mongo_id = None
+ self._avalon_archived_by_id = None
+ self._avalon_archived_by_name = None
+
+ self.task_changes_by_avalon_id = {}
+
+ self._avalon_custom_attributes = None
+ self._ent_types_by_name = None
+
+ self.ftrack_ents_by_id = {}
+ self.obj_id_ent_type_map = {}
+ self.ftrack_recreated_mapping = {}
+
+ self.ftrack_added = {}
+ self.ftrack_moved = {}
+ self.ftrack_renamed = {}
+ self.ftrack_updated = {}
+ self.ftrack_removed = {}
+
+ self.moved_in_avalon = []
+ self.renamed_in_avalon = []
+ self.hier_cust_attrs_changes = collections.defaultdict(list)
+
+ self.duplicated = []
+ self.regex_failed = []
+
+ self.regex_schemas = {}
+ self.updates = collections.defaultdict(dict)
+
+ self.report_items = {
+ "info": collections.defaultdict(list),
+ "warning": collections.defaultdict(list),
+ "error": collections.defaultdict(list)
+ }
+
+ def set_process_session(self, session):
+ try:
+ self.process_session.close()
+ except Exception:
+ pass
+ self.process_session = ftrack_api.Session(
+ server_url=session.server_url,
+ api_key=session.api_key,
+ api_user=session.api_user,
+ auto_connect_event_hub=True
+ )
+ atexit.register(lambda: self.process_session.close())
+
+ def filter_updated(self, updates):
+ filtered_updates = {}
+ for ftrack_id, ent_info in updates.items():
+ changed_keys = [k for k in (ent_info.get("keys") or [])]
+ changes = {
+ k: v for k, v in (ent_info.get("changes") or {}).items()
+ }
+
+ entity_type = ent_info["entity_type"]
+ if entity_type == "Task":
+ if "name" in changed_keys:
+ ent_info["keys"] = ["name"]
+ ent_info["changes"] = {"name": changes.pop("name")}
+ filtered_updates[ftrack_id] = ent_info
+ continue
+
+ for _key in self.ignore_keys:
+ if _key in changed_keys:
+ changed_keys.remove(_key)
+ changes.pop(_key, None)
+
+ if not changed_keys:
+ continue
+
+ # Remove custom attributes starting with `avalon_` from changes
+ # - these custom attributes are not synchronized
+ avalon_keys = []
+ for key in changes:
+ if key.startswith("avalon_"):
+ avalon_keys.append(key)
+
+ for _key in avalon_keys:
+ changed_keys.remove(_key)
+ changes.pop(_key, None)
+
+ if not changed_keys:
+ continue
+
+ ent_info["keys"] = changed_keys
+ ent_info["changes"] = changes
+ filtered_updates[ftrack_id] = ent_info
+
+ return filtered_updates
+
+ def get_ent_path(self, ftrack_id):
+ entity = self.ftrack_ents_by_id.get(ftrack_id)
+ if not entity:
+ entity = self.process_session.query(
+ self.entities_query_by_id.format(
+ self.cur_project["id"], ftrack_id
+ )
+ ).first()
+ if entity:
+ self.ftrack_ents_by_id[ftrack_id] = entity
+ else:
+ return "unknown hierarchy"
+ return "/".join([ent["name"] for ent in entity["link"]])
def launch(self, session, event):
- ca_mongoid = lib.get_ca_mongoid()
- # If mongo_id textfield has changed: RETURN!
- # - infinite loop
- for ent in event['data']['entities']:
- if ent.get('keys') is not None:
- if ca_mongoid in ent['keys']:
- return
+ # Try to commit and if any error happen then recreate session
+ try:
+ self.process_session.commit()
+ except Exception:
+ self.set_process_session(session)
- entities = self._get_entities(session, event, self.ignore_entityType)
- ft_project = None
- # get project
- for entity in entities:
- try:
- base_proj = entity['link'][0]
- except Exception:
+ # Reset object values for each launch
+ self.reset_variables()
+ self._cur_event = event
+
+ entities_by_action = {
+ "remove": {},
+ "update": {},
+ "move": {},
+ "add": {}
+ }
+
+ entities_info = event["data"]["entities"]
+ found_actions = set()
+ for ent_info in entities_info:
+ entityType = ent_info["entityType"]
+ if entityType in self.ignore_entTypes:
continue
- ft_project = session.get(base_proj['type'], base_proj['id'])
- break
- for ent_info in event['data']['entities']:
+ entity_type = ent_info.get("entity_type")
+ if not entity_type or entity_type in self.ignore_ent_types:
+ continue
+
+ action = ent_info["action"]
+ ftrack_id = ent_info["entityId"]
+ if action == "move":
+ ent_keys = ent_info["keys"]
+                # Separate update info from move action
+ if len(ent_keys) > 1:
+ _ent_info = ent_info.copy()
+ for ent_key in ent_keys:
+ if ent_key == "parent_id":
+ _ent_info["changes"].pop(ent_key, None)
+ _ent_info["keys"].remove(ent_key)
+ else:
+ ent_info["changes"].pop(ent_key, None)
+ ent_info["keys"].remove(ent_key)
+
+ entities_by_action["update"][ftrack_id] = _ent_info
+
+ found_actions.add(action)
+ entities_by_action[action][ftrack_id] = ent_info
+
+ found_actions = list(found_actions)
+ if not found_actions:
+ return True
+
+ # Check if auto sync was turned on/off
+ updated = entities_by_action["update"]
+ for ftrack_id, ent_info in updated.items():
# filter project
- if ent_info.get("entityType") != "show":
+ if ent_info["entityType"] != "show":
continue
- if ent_info.get("action") != "update":
+ changes = ent_info["changes"]
+ if CustAttrAutoSync not in changes:
continue
- changes = ent_info.get("changes") or {}
- if 'avalon_auto_sync' not in changes:
- continue
-
- auto_sync = changes['avalon_auto_sync']["new"]
+ auto_sync = changes[CustAttrAutoSync]["new"]
if auto_sync == "1":
# Trigger sync to avalon action if auto sync was turned on
+ ft_project = self.cur_project
self.log.debug((
"Auto sync was turned on for project <{}>."
" Triggering syncToAvalon action."
@@ -54,11 +529,6 @@ class SyncToAvalon(BaseEvent):
"entityId": ft_project["id"],
"entityType": "show"
}]
- # Stop event so sync hierarchical won't be affected
- # - other event should not be affected since auto-sync
- # is in all cases single data event
- event.stop()
- # Trigger action
self.trigger_action(
action_name="sync.to.avalon.server",
event=event,
@@ -67,98 +537,1746 @@ class SyncToAvalon(BaseEvent):
# Exit for both cases
return True
- # check if project is set to auto-sync
+ # Filter updated data by changed keys
+ updated = self.filter_updated(updated)
+
+ # skip most of events where nothing has changed for avalon
if (
- ft_project is None or
- 'avalon_auto_sync' not in ft_project['custom_attributes'] or
- ft_project['custom_attributes']['avalon_auto_sync'] is False
+ len(found_actions) == 1 and
+ found_actions[0] == "update" and
+ not updated
+ ):
+ return True
+
+ ft_project = self.cur_project
+ # Check if auto-sync custom attribute exists
+ if CustAttrAutoSync not in ft_project["custom_attributes"]:
+ # TODO should we sent message to someone?
+ self.log.error((
+ "Custom attribute \"{}\" is not created or user \"{}\" used"
+ " for Event server don't have permissions to access it!"
+ ).format(CustAttrAutoSync, self.session.api_user))
+ return True
+
+ # Skip if auto-sync is not set
+ auto_sync = ft_project["custom_attributes"][CustAttrAutoSync]
+ if auto_sync is not True:
+ return True
+
+ debug_msg = ""
+ debug_msg += "Updated: {}".format(len(updated))
+ debug_action_map = {
+ "add": "Created",
+ "remove": "Removed",
+ "move": "Moved"
+ }
+ for action, infos in entities_by_action.items():
+ if action == "update":
+ continue
+ _action = debug_action_map[action]
+ debug_msg += "| {}: {}".format(_action, len(infos))
+
+ self.log.debug("Project changes <{}>: {}".format(
+ ft_project["full_name"], debug_msg
+ ))
+ # Get ftrack entities - find all ftrack ids first
+ ftrack_ids = []
+ for ftrack_id in updated:
+ ftrack_ids.append(ftrack_id)
+
+        for action, action_ftrack_ids in entities_by_action.items():
+            # skip updated (already prepared) and removed (not exist in ftrack)
+            if action == "remove":
+                continue
+
+            for ftrack_id in action_ftrack_ids:
+                if ftrack_id not in ftrack_ids:
+                    ftrack_ids.append(ftrack_id)
+
+ if ftrack_ids:
+ joined_ids = ", ".join(["\"{}\"".format(id) for id in ftrack_ids])
+ ftrack_entities = self.process_session.query(
+ self.entities_query_by_id.format(ft_project["id"], joined_ids)
+ ).all()
+ for entity in ftrack_entities:
+ self.ftrack_ents_by_id[entity["id"]] = entity
+
+ # Filter updates where name is changing
+ for ftrack_id, ent_info in updated.items():
+ ent_keys = ent_info["keys"]
+            # Separate update info from rename
+ if "name" not in ent_keys:
+ continue
+
+ _ent_info = copy.deepcopy(ent_info)
+ for ent_key in ent_keys:
+ if ent_key == "name":
+ ent_info["changes"].pop(ent_key, None)
+ ent_info["keys"].remove(ent_key)
+ else:
+ _ent_info["changes"].pop(ent_key, None)
+ _ent_info["keys"].remove(ent_key)
+
+ self.ftrack_renamed[ftrack_id] = _ent_info
+
+ self.ftrack_removed = entities_by_action["remove"]
+ self.ftrack_moved = entities_by_action["move"]
+ self.ftrack_added = entities_by_action["add"]
+ self.ftrack_updated = updated
+
+ self.log.debug("Synchronization begins")
+ try:
+ time_1 = time.time()
+ # 1.) Process removed - may affect all other actions
+ self.process_removed()
+ time_2 = time.time()
+ # 2.) Process renamed - may affect added
+ self.process_renamed()
+ time_3 = time.time()
+ # 3.) Process added - moved entity may be moved to new entity
+ self.process_added()
+ time_4 = time.time()
+ # 4.) Process moved
+ self.process_moved()
+ time_5 = time.time()
+ # 5.) Process updated
+ self.process_updated()
+ time_6 = time.time()
+ # 6.) Process changes in hierarchy or hier custom attribues
+ self.process_hier_cleanup()
+ if self.updates:
+ self.update_entities()
+ time_7 = time.time()
+
+ time_removed = time_2 - time_1
+ time_renamed = time_3 - time_2
+ time_added = time_4 - time_3
+ time_moved = time_5 - time_4
+ time_updated = time_6 - time_5
+ time_cleanup = time_7 - time_6
+ time_total = time_7 - time_1
+ self.log.debug("Process time: {} <{}, {}, {}, {}, {}, {}>".format(
+ time_total, time_removed, time_renamed, time_added, time_moved,
+ time_updated, time_cleanup
+ ))
+
+ except Exception:
+ msg = "An error has happened during synchronization"
+ self.report_items["error"][msg].append((
+                str(traceback.format_exc()).replace("\n", "<br>")
+            ).replace(" ", "&nbsp;"))
+
+ self.report()
+ return True
+
+ def process_removed(self):
+ if not self.ftrack_removed:
+ return
+ ent_infos = self.ftrack_removed
+ removable_ids = []
+ recreate_ents = []
+ removed_names = []
+ for ftrack_id, removed in ent_infos.items():
+ entity_type = removed["entity_type"]
+ parent_id = removed["parentId"]
+ removed_name = removed["changes"]["name"]["old"]
+ if entity_type == "Task":
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
+ if not avalon_ent:
+ self.log.debug((
+ "Parent entity of task was not found in avalon <{}>"
+ ).format(self.get_ent_path(parent_id)))
+ continue
+
+ mongo_id = avalon_ent["_id"]
+ if mongo_id not in self.task_changes_by_avalon_id:
+ self.task_changes_by_avalon_id[mongo_id] = (
+ avalon_ent["data"]["tasks"]
+ )
+
+ if removed_name in self.task_changes_by_avalon_id[mongo_id]:
+ self.task_changes_by_avalon_id[mongo_id].remove(
+ removed_name
+ )
+
+ continue
+
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not avalon_ent:
+ continue
+ mongo_id = avalon_ent["_id"]
+ if self.changeability_by_mongo_id[mongo_id]:
+ removable_ids.append(mongo_id)
+ removed_names.append(removed_name)
+ else:
+ recreate_ents.append(avalon_ent)
+
+ if removable_ids:
+ # TODO logging
+ self.log.debug("Assets marked as archived <{}>".format(
+ ", ".join(removed_names)
+ ))
+ self.dbcon.update_many(
+ {"_id": {"$in": removable_ids}, "type": "asset"},
+ {"$set": {"type": "archived_asset"}}
+ )
+ self.remove_cached_by_key("id", removable_ids)
+
+ if recreate_ents:
+ # sort removed entities by parents len
+ # - length of parents determine hierarchy level
+ recreate_ents = sorted(
+ recreate_ents,
+ key=(lambda item: len(
+ (item.get("data", {}).get("parents") or [])
+ ))
+ )
+ # TODO logging
+ # TODO report
+ recreate_msg = (
+ "Deleted entity was recreated||Entity was recreated because"
+ " it or its children contain published data"
+ )
+ proj, ents = self.avalon_entities
+ for avalon_entity in recreate_ents:
+ old_ftrack_id = avalon_entity["data"]["ftrackId"]
+ vis_par = avalon_entity["data"]["visualParent"]
+ if vis_par is None:
+ vis_par = proj["_id"]
+ parent_ent = self.avalon_ents_by_id[vis_par]
+ parent_ftrack_id = parent_ent["data"]["ftrackId"]
+ parent_ftrack_ent = self.ftrack_ents_by_id.get(
+ parent_ftrack_id
+ )
+ if not parent_ftrack_ent:
+ if parent_ent["type"].lower() == "project":
+ parent_ftrack_ent = self.cur_project
+ else:
+ parent_ftrack_ent = self.process_session.query(
+ self.entities_query_by_id.format(
+ self.cur_project["id"], parent_ftrack_id
+ )
+ ).one()
+ entity_type = avalon_entity["data"]["entityType"]
+ new_entity = self.process_session.create(entity_type, {
+ "name": avalon_entity["name"],
+ "parent": parent_ftrack_ent
+ })
+ try:
+ self.process_session.commit()
+ except Exception:
+ # TODO logging
+ # TODO report
+                    self.process_session.rollback()
+ ent_path_items = [self.cur_project["full_name"]]
+ ent_path_items.extend([
+ par for par in avalon_entity["data"]["parents"]
+ ])
+ ent_path_items.append(avalon_entity["name"])
+ ent_path = "/".join(ent_path_items)
+
+ error_msg = "Couldn't recreate entity in Ftrack"
+ report_msg = (
+ "{}||Trying to recreate because it or its children"
+ " contain published data"
+ ).format(error_msg)
+ self.report_items["warning"][report_msg].append(ent_path)
+ self.log.warning(
+ "{}. Process session commit failed! <{}>".format(
+ error_msg, ent_path
+ ),
+ exc_info=True
+ )
+ continue
+
+ new_entity_id = new_entity["id"]
+ avalon_entity["data"]["ftrackId"] = new_entity_id
+
+ for key, val in avalon_entity["data"].items():
+ if not val:
+ continue
+ if key not in new_entity["custom_attributes"]:
+ continue
+
+ new_entity["custom_attributes"][key] = val
+
+ new_entity["custom_attributes"][CustAttrIdKey] = (
+ str(avalon_entity["_id"])
+ )
+ ent_path = self.get_ent_path(new_entity_id)
+
+ try:
+ self.process_session.commit()
+ except Exception:
+ # TODO logging
+ # TODO report
+                self.process_session.rollback()
+ error_msg = (
+ "Couldn't update custom attributes after recreation"
+ " of entity in Ftrack"
+ )
+ report_msg = (
+ "{}||Entity was recreated because it or its children"
+ " contain published data"
+ ).format(error_msg)
+ self.report_items["warning"][report_msg].append(ent_path)
+ self.log.warning(
+ "{}. Process session commit failed! <{}>".format(
+ error_msg, ent_path
+ ),
+ exc_info=True
+ )
+ continue
+
+ self.report_items["info"][recreate_msg].append(ent_path)
+
+ self.ftrack_recreated_mapping[old_ftrack_id] = new_entity_id
+ self.process_session.commit()
+
+ found_idx = None
+ for idx, _entity in enumerate(self._avalon_ents):
+ if _entity["_id"] == avalon_entity["_id"]:
+ found_idx = idx
+ break
+
+ if found_idx is None:
+ continue
+
+ # Prepare updates dict for mongo update
+ if "data" not in self.updates[avalon_entity["_id"]]:
+ self.updates[avalon_entity["_id"]]["data"] = {}
+
+ self.updates[avalon_entity["_id"]]["data"]["ftrackId"] = (
+ new_entity_id
+ )
+ # Update cached entities
+ self._avalon_ents[found_idx] = avalon_entity
+
+ if self._avalon_ents_by_id is not None:
+ mongo_id = avalon_entity["_id"]
+ self._avalon_ents_by_id[mongo_id] = avalon_entity
+
+ if self._avalon_ents_by_parent_id is not None:
+ vis_par = avalon_entity["data"]["visualParent"]
+ children = self._avalon_ents_by_parent_id[vis_par]
+ found_idx = None
+ for idx, _entity in enumerate(children):
+ if _entity["_id"] == avalon_entity["_id"]:
+ found_idx = idx
+ break
+ children[found_idx] = avalon_entity
+ self._avalon_ents_by_parent_id[vis_par] = children
+
+ if self._avalon_ents_by_ftrack_id is not None:
+ self._avalon_ents_by_ftrack_id.pop(old_ftrack_id)
+ self._avalon_ents_by_ftrack_id[new_entity_id] = (
+ avalon_entity
+ )
+
+ if self._avalon_ents_by_name is not None:
+ name = avalon_entity["name"]
+ self._avalon_ents_by_name[name] = avalon_entity
+
+ # Check if entities with same name can be synchronized
+ if not removed_names:
+ return
+
+ self.check_names_synchronizable(removed_names)
+
+ def check_names_synchronizable(self, names):
+ """Check if entities with specific names are importable.
+
+        This check should happen after removing entity or renaming entity.
+        When entity was removed or renamed then its name is possible to sync.
+ """
+ joined_passed_names = ", ".join(
+ ["\"{}\"".format(name) for name in names]
+ )
+ same_name_entities = self.process_session.query(
+ self.entities_name_query_by_name.format(
+ self.cur_project["id"], joined_passed_names
+ )
+ ).all()
+ if not same_name_entities:
+ return
+
+ entities_by_name = collections.defaultdict(list)
+ for entity in same_name_entities:
+ entities_by_name[entity["name"]].append(entity)
+
+ synchronizable_ents = []
+ self.log.debug((
+ "Deleting of entities should allow to synchronize another entities"
+ " with same name."
+ ))
+ for name, ents in entities_by_name.items():
+ if len(ents) != 1:
+ self.log.debug((
+ "Name \"{}\" still have more than one entity <{}>"
+ ).format(
+ name, "| ".join(
+ [self.get_ent_path(ent["id"]) for ent in ents]
+ )
+ ))
+ continue
+
+ entity = ents[0]
+ ent_path = self.get_ent_path(entity["id"])
+ # TODO logging
+ self.log.debug(
+ "Checking if can synchronize entity <{}>".format(ent_path)
+ )
+ # skip if already synchronized
+ ftrack_id = entity["id"]
+ if ftrack_id in self.avalon_ents_by_ftrack_id:
+ # TODO logging
+ self.log.debug(
+ "- Entity is already synchronized (skipping) <{}>".format(
+ ent_path
+ )
+ )
+ continue
+
+ parent_id = entity["parent_id"]
+ if parent_id not in self.avalon_ents_by_ftrack_id:
+ # TODO logging
+ self.log.debug((
+ "- Entity's parent entity doesn't seems to"
+ " be synchronized (skipping) <{}>"
+ ).format(ent_path))
+ continue
+
+ synchronizable_ents.append(entity)
+
+ if not synchronizable_ents:
+ return
+
+ synchronizable_ents = sorted(
+ synchronizable_ents,
+ key=(lambda entity: len(entity["link"]))
+ )
+
+ children_queue = queue.Queue()
+ for entity in synchronizable_ents:
+ parent_avalon_ent = self.avalon_ents_by_ftrack_id[
+ entity["parent_id"]
+ ]
+ self.create_entity_in_avalon(entity, parent_avalon_ent)
+
+ for child in entity["children"]:
+ if child.entity_type.lower() == "task":
+ continue
+ children_queue.put(child)
+
+ while not children_queue.empty():
+ entity = children_queue.get()
+ ftrack_id = entity["id"]
+ name = entity["name"]
+ ent_by_ftrack_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if ent_by_ftrack_id:
+ raise Exception((
+ "This is bug, parent was just synchronized to avalon"
+ " but entity is already in database {}"
+ ).format(dict(entity)))
+
+ # Entity has duplicated name with another entity
+ # - may be renamed: in that case renaming method will handle that
+ duplicate_ent = self.avalon_ents_by_name.get(name)
+ if duplicate_ent:
+ continue
+
+ passed_regex = avalon_sync.check_regex(
+ name, "asset", schema_patterns=self.regex_schemas
+ )
+ if not passed_regex:
+ continue
+
+ parent_id = entity["parent_id"]
+ parent_avalon_ent = self.avalon_ents_by_ftrack_id[parent_id]
+
+ self.create_entity_in_avalon(entity, parent_avalon_ent)
+
+ for child in entity["children"]:
+ if child.entity_type.lower() == "task":
+ continue
+ children_queue.put(child)
+
+ def create_entity_in_avalon(self, ftrack_ent, parent_avalon):
+ proj, ents = self.avalon_entities
+
+ # Parents, Hierarchy
+ ent_path_items = [ent["name"] for ent in ftrack_ent["link"]]
+ parents = ent_path_items[1:len(ent_path_items)-1:]
+ hierarchy = ""
+ if len(parents) > 0:
+ hierarchy = os.path.sep.join(parents)
+
+ # TODO logging
+ self.log.debug(
+ "Trying to synchronize entity <{}>".format(
+ "/".join(ent_path_items)
+ )
+ )
+
+ # Tasks
+ tasks = []
+ for child in ftrack_ent["children"]:
+ if child.entity_type.lower() != "task":
+ continue
+ tasks.append(child["name"])
+
+ # Visual Parent
+ vis_par = None
+ if parent_avalon["type"].lower() != "project":
+ vis_par = parent_avalon["_id"]
+
+ mongo_id = ObjectId()
+ name = ftrack_ent["name"]
+ final_entity = {
+ "_id": mongo_id,
+ "name": name,
+ "type": "asset",
+ "schema": EntitySchemas["asset"],
+ "parent": proj["_id"],
+ "data": {
+ "ftrackId": ftrack_ent["id"],
+ "entityType": ftrack_ent.entity_type,
+ "parents": parents,
+ "hierarchy": hierarchy,
+ "tasks": tasks,
+ "visualParent": vis_par
+ }
+ }
+ cust_attrs = self.get_cust_attr_values(ftrack_ent)
+ for key, val in cust_attrs.items():
+ if key.startswith("avalon_"):
+ continue
+ final_entity["data"][key] = val
+
+ _mongo_id_str = cust_attrs.get(CustAttrIdKey)
+ if _mongo_id_str:
+ try:
+ _mongo_id = ObjectId(_mongo_id_str)
+ if _mongo_id not in self.avalon_ents_by_id:
+ mongo_id = _mongo_id
+ final_entity["_id"] = mongo_id
+
+ except Exception:
+ pass
+
+ ent_path_items = [self.cur_project["full_name"]]
+ ent_path_items.extend([par for par in parents])
+ ent_path_items.append(name)
+ ent_path = "/".join(ent_path_items)
+
+ try:
+ schema.validate(final_entity)
+ except Exception:
+ # TODO logging
+ # TODO report
+ error_msg = (
+ "Schema validation failed for new entity (This is a bug)"
+ )
+ error_traceback = (
+                str(traceback.format_exc()).replace("\n", "<br>")
+            ).replace(" ", "&nbsp;")
+
+            item_msg = ent_path + "<br>" + error_traceback
+ self.report_items["error"][error_msg].append(item_msg)
+ self.log.error(
+ "{}: \"{}\"".format(error_msg, str(final_entity)),
+ exc_info=True
+ )
+ return None
+
+ replaced = False
+ archived = self.avalon_archived_by_name.get(name)
+ if archived:
+ archived_id = archived["_id"]
+ if (
+ archived["data"]["parents"] == parents or
+ self.changeability_by_mongo_id[archived_id]
+ ):
+ # TODO logging
+ self.log.debug(
+ "Entity was unarchived instead of creation <{}>".format(
+ ent_path
+ )
+ )
+ mongo_id = archived_id
+ final_entity["_id"] = mongo_id
+ self.dbcon.replace_one({"_id": mongo_id}, final_entity)
+ replaced = True
+
+ if not replaced:
+ self.dbcon.insert_one(final_entity)
+ # TODO logging
+ self.log.debug("Entity was synchronized <{}>".format(ent_path))
+
+ mongo_id_str = str(mongo_id)
+ if mongo_id_str != ftrack_ent["custom_attributes"][CustAttrIdKey]:
+ ftrack_ent["custom_attributes"][CustAttrIdKey] = mongo_id_str
+ try:
+ self.process_session.commit()
+ except Exception:
+                self.process_session.rollback()
+ # TODO logging
+ # TODO report
+ error_msg = "Failed to store MongoID to entity's custom attribute"
+ report_msg = (
+ "{}||SyncToAvalon action may solve this issue"
+ ).format(error_msg)
+
+ self.report_items["warning"][report_msg].append(ent_path)
+ self.log.error(
+ "{}: \"{}\"".format(error_msg, ent_path),
+ exc_info=True
+ )
+
+ # modify cached data
+ # Skip if self._avalon_ents is not set(maybe never happen)
+ if self._avalon_ents is None:
+ return final_entity
+
+ if self._avalon_ents is not None:
+ proj, ents = self._avalon_ents
+ ents.append(final_entity)
+ self._avalon_ents = (proj, ents)
+
+ if self._avalon_ents_by_id is not None:
+ self._avalon_ents_by_id[mongo_id] = final_entity
+
+ if self._avalon_ents_by_parent_id is not None:
+ self._avalon_ents_by_parent_id[vis_par].append(final_entity)
+
+ if self._avalon_ents_by_ftrack_id is not None:
+ self._avalon_ents_by_ftrack_id[ftrack_ent["id"]] = final_entity
+
+ if self._avalon_ents_by_name is not None:
+ self._avalon_ents_by_name[ftrack_ent["name"]] = final_entity
+
+ return final_entity
+
+ def get_cust_attr_values(self, entity, keys=None):
+ output = {}
+ custom_attrs, hier_attrs = self.avalon_custom_attributes
+ not_processed_keys = True
+ if keys:
+ not_processed_keys = [k for k in keys]
+        # Normal custom attributes
+ processed_keys = []
+ for attr in custom_attrs:
+ if not not_processed_keys:
+ break
+ key = attr["key"]
+ if key in processed_keys:
+ continue
+
+ if key not in entity["custom_attributes"]:
+ continue
+
+ if keys:
+ if key not in keys:
+ continue
+ else:
+ not_processed_keys.remove(key)
+
+ output[key] = entity["custom_attributes"][key]
+ processed_keys.append(key)
+
+ if not not_processed_keys:
+ return output
+
+ # Hierarchical cust attrs
+ hier_keys = []
+ defaults = {}
+ for attr in hier_attrs:
+ key = attr["key"]
+ if keys and key not in keys:
+ continue
+ hier_keys.append(key)
+ defaults[key] = attr["default"]
+
+ hier_values = avalon_sync.get_hierarchical_attributes(
+ self.process_session, entity, hier_keys, defaults
+ )
+ for key, val in hier_values.items():
+ output[key] = val
+
+ return output
+
+ def process_renamed(self):
+ if not self.ftrack_renamed:
+ return
+
+ ent_infos = self.ftrack_renamed
+ renamed_tasks = {}
+ not_found = {}
+ changeable_queue = queue.Queue()
+ for ftrack_id, ent_info in ent_infos.items():
+ entity_type = ent_info["entity_type"]
+ new_name = ent_info["changes"]["name"]["new"]
+ old_name = ent_info["changes"]["name"]["old"]
+ if entity_type == "Task":
+ parent_id = ent_info["parentId"]
+ renamed_tasks[parent_id] = {
+ "new": new_name,
+ "old": old_name,
+ "ent_info": ent_info
+ }
+ continue
+
+ ent_path = self.get_ent_path(ftrack_id)
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not avalon_ent:
+ # TODO logging
+ self.log.debug((
+ "Can't change the name (Entity is not is avalon) <{}>"
+ ).format(ent_path))
+ not_found[ftrack_id] = ent_info
+ continue
+
+ if new_name == avalon_ent["name"]:
+ # TODO logging
+ self.log.debug((
+ "Avalon entity already has the same name <{}>"
+ ).format(ent_path))
+ continue
+
+ mongo_id = avalon_ent["_id"]
+ if self.changeability_by_mongo_id[mongo_id]:
+ changeable_queue.put((ftrack_id, avalon_ent, new_name))
+ else:
+ ftrack_ent = self.ftrack_ents_by_id[ftrack_id]
+ ftrack_ent["name"] = avalon_ent["name"]
+ try:
+ self.process_session.commit()
+ # TODO logging
+ # TODO report
+ error_msg = "Entity renamed back"
+ report_msg = (
+ "{}||It is not possible to change"
+ " the name of an entity or it's parents, "
+ " if it already contained published data."
+ ).format(error_msg)
+ self.report_items["info"][report_msg].append(ent_path)
+ self.log.warning("{} <{}>".format(error_msg, ent_path))
+
+ except Exception:
+ self.process_session.rollback()
+ # TODO report
+ # TODO logging
+ error_msg = (
+ "Couldn't rename the entity back to its original name"
+ )
+ report_msg = (
+ "{}||Renamed because it is not possible to"
+ " change the name of an entity or it's parents, "
+ " if it already contained published data."
+ ).format(error_msg)
+ error_traceback = (
+                        str(traceback.format_exc()).replace("\n", "<br>")
+                    ).replace(" ", "&nbsp;")
+
+                    item_msg = ent_path + "<br>" + error_traceback
+ self.report_items["warning"][report_msg].append(item_msg)
+ self.log.warning(
+ "{}: \"{}\"".format(error_msg, ent_path),
+ exc_info=True
+ )
+
+ old_names = []
+ # Process renaming in Avalon DB
+ while not changeable_queue.empty():
+ ftrack_id, avalon_ent, new_name = changeable_queue.get()
+ mongo_id = avalon_ent["_id"]
+ old_name = avalon_ent["name"]
+
+ _entity_type = "asset"
+ if entity_type == "Project":
+ _entity_type = "project"
+
+ passed_regex = avalon_sync.check_regex(
+ new_name, _entity_type, schema_patterns=self.regex_schemas
+ )
+ if not passed_regex:
+ self.regex_failed.append(ftrack_id)
+ continue
+
+ # if avalon does not have same name then can be changed
+ same_name_avalon_ent = self.avalon_ents_by_name.get(new_name)
+ if not same_name_avalon_ent:
+ old_val = self._avalon_ents_by_name.pop(old_name)
+ old_val["name"] = new_name
+ self._avalon_ents_by_name[new_name] = old_val
+ self.updates[mongo_id] = {"name": new_name}
+ self.renamed_in_avalon.append(mongo_id)
+
+ old_names.append(old_name)
+ if new_name in old_names:
+ old_names.remove(new_name)
+
+ # TODO logging
+ ent_path = self.get_ent_path(ftrack_id)
+ self.log.debug(
+ "Name of entity will be changed to \"{}\" <{}>".format(
+ new_name, ent_path
+ )
+ )
+ continue
+
+ # Check if same name is in changable_queue
+ # - it's name may be changed in next iteration
+ same_name_ftrack_id = same_name_avalon_ent["data"]["ftrackId"]
+ same_is_unprocessed = False
+ for item in list(changeable_queue.queue):
+ if same_name_ftrack_id == item[0]:
+ same_is_unprocessed = True
+ break
+
+ if same_is_unprocessed:
+ changeable_queue.put((ftrack_id, avalon_ent, new_name))
+ continue
+
+ self.duplicated.append(ftrack_id)
+
+ if old_names:
+ self.check_names_synchronizable(old_names)
+
+ for parent_id, task_change in renamed_tasks.items():
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
+ ent_info = task_change["ent_info"]
+ if not avalon_ent:
+ not_found[ent_info["entityId"]] = ent_info
+ continue
+
+ new_name = task_change["new"]
+ old_name = task_change["old"]
+ passed_regex = avalon_sync.check_regex(
+ new_name, "task", schema_patterns=self.regex_schemas
+ )
+ if not passed_regex:
+                ftrack_id = ent_info["entityId"]
+ self.regex_failed.append(ftrack_id)
+ continue
+
+ mongo_id = avalon_ent["_id"]
+ if mongo_id not in self.task_changes_by_avalon_id:
+ self.task_changes_by_avalon_id[mongo_id] = (
+ avalon_ent["data"]["tasks"]
+ )
+
+ if old_name in self.task_changes_by_avalon_id[mongo_id]:
+ self.task_changes_by_avalon_id[mongo_id].remove(old_name)
+ else:
+ parent_ftrack_ent = self.ftrack_ents_by_id.get(parent_id)
+ if not parent_ftrack_ent:
+ parent_ftrack_ent = self.process_session.query(
+ self.entities_query_by_id.format(
+ self.cur_project["id"], parent_id
+ )
+ ).first()
+
+ if parent_ftrack_ent:
+ self.ftrack_ents_by_id[parent_id] = parent_ftrack_ent
+ child_names = []
+ for child in parent_ftrack_ent["children"]:
+ if child.entity_type.lower() != "task":
+ continue
+ child_names.append(child["name"])
+
+ tasks = [task for task in (
+ self.task_changes_by_avalon_id[mongo_id]
+ )]
+ for task in tasks:
+ if task not in child_names:
+ self.task_changes_by_avalon_id[mongo_id].remove(
+ task
+ )
+
+ if new_name not in self.task_changes_by_avalon_id[mongo_id]:
+ self.task_changes_by_avalon_id[mongo_id].append(new_name)
+
+ # not_found are not processed since all not found are
+ # not found because they are not synchronizable
+
+ def process_added(self):
+ ent_infos = self.ftrack_added
+ if not ent_infos:
+ return
+
+ cust_attrs, hier_attrs = self.avalon_cust_attrs
+ entity_type_conf_ids = {}
+ # Skip if already exit in avalon db or tasks entities
+ # - happen when was created by any sync event/action
+ pop_out_ents = []
+ new_tasks_by_parent = collections.defaultdict(list)
+ _new_ent_infos = {}
+ for ftrack_id, ent_info in ent_infos.items():
+ if self.avalon_ents_by_ftrack_id.get(ftrack_id):
+ pop_out_ents.append(ftrack_id)
+ self.log.warning(
+ "Added entity is already synchronized <{}>".format(
+ self.get_ent_path(ftrack_id)
+ )
+ )
+ continue
+
+ entity_type = ent_info["entity_type"]
+            if entity_type == "Task":
+                parent_id = ent_info["parentId"]
+                new_tasks_by_parent[parent_id].append(ent_info)
+                pop_out_ents.append(ftrack_id)
+                continue
+
+ configuration_id = entity_type_conf_ids.get(entity_type)
+ if not configuration_id:
+ for attr in cust_attrs:
+ key = attr["key"]
+ if key != CustAttrIdKey:
+ continue
+
+ if attr["entity_type"] != ent_info["entityType"]:
+ continue
+
+ if ent_info["entityType"] != "show":
+ if attr["object_type_id"] != ent_info["objectTypeId"]:
+ continue
+
+ configuration_id = attr["id"]
+ entity_type_conf_ids[entity_type] = configuration_id
+ break
+
+ _entity_key = collections.OrderedDict({
+ "configuration_id": configuration_id,
+ "entity_id": ftrack_id
+ })
+
+ self.process_session.recorded_operations.push(
+ ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ ftrack_api.symbol.NOT_SET,
+ ""
+ )
+ )
+
+ try:
+ # Commit changes of mongo_id to empty string
+ self.process_session.commit()
+            self.log.debug("Committing unsetting")
+ except Exception:
+ self.process_session.rollback()
+ # TODO logging
+ msg = (
+ "Could not set value of Custom attribute, where mongo id"
+ " is stored, to empty string. Ftrack ids: \"{}\""
+ ).format(", ".join(ent_infos.keys()))
+ self.log.warning(msg, exc_info=True)
+
+ for ftrack_id in pop_out_ents:
+ ent_infos.pop(ftrack_id)
+
+ # sort by parents length (same as by hierarchy level)
+ _ent_infos = sorted(
+ ent_infos.values(),
+ key=(lambda ent_info: len(ent_info.get("parents", [])))
+ )
+ to_sync_by_id = collections.OrderedDict()
+ for ent_info in _ent_infos:
+ ft_id = ent_info["entityId"]
+ to_sync_by_id[ft_id] = self.ftrack_ents_by_id[ft_id]
+
+ # cache regex success (for tasks)
+ for ftrack_id, entity in to_sync_by_id.items():
+ if entity.entity_type.lower() == "project":
+ raise Exception((
+ "Project can't be created with event handler!"
+ "This is a bug"
+ ))
+ parent_id = entity["parent_id"]
+ parent_avalon = self.avalon_ents_by_ftrack_id.get(parent_id)
+ if not parent_avalon:
+ # TODO logging
+ self.log.debug((
+ "Skipping synchronization of entity"
+ " because parent was not found in Avalon DB <{}>"
+ ).format(self.get_ent_path(ftrack_id)))
+ continue
+
+ is_synchonizable = True
+ name = entity["name"]
+ passed_regex = avalon_sync.check_regex(
+ name, "asset", schema_patterns=self.regex_schemas
+ )
+ if not passed_regex:
+ self.regex_failed.append(ftrack_id)
+ is_synchonizable = False
+
+ if name in self.avalon_ents_by_name:
+ self.duplicated.append(ftrack_id)
+ is_synchonizable = False
+
+ if not is_synchonizable:
+ continue
+
+ self.create_entity_in_avalon(entity, parent_avalon)
+
+ for parent_id, ent_infos in new_tasks_by_parent.items():
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
+ if not avalon_ent:
+ # TODO logging
+ self.log.debug((
+ "Skipping synchronization of task"
+ " because parent was not found in Avalon DB <{}>"
+ ).format(self.get_ent_path(parent_id)))
+ continue
+
+ mongo_id = avalon_ent["_id"]
+ if mongo_id not in self.task_changes_by_avalon_id:
+ self.task_changes_by_avalon_id[mongo_id] = (
+ avalon_ent["data"]["tasks"]
+ )
+
+ for ent_info in ent_infos:
+ new_name = ent_info["changes"]["name"]["new"]
+ passed_regex = avalon_sync.check_regex(
+ new_name, "task", schema_patterns=self.regex_schemas
+ )
+ if not passed_regex:
+                    self.regex_failed.append(ent_info["entityId"])
+ continue
+
+ if new_name not in self.task_changes_by_avalon_id[mongo_id]:
+ self.task_changes_by_avalon_id[mongo_id].append(new_name)
+
+ def process_moved(self):
+ if not self.ftrack_moved:
+ return
+
+ ftrack_moved = {k: v for k, v in sorted(
+ self.ftrack_moved.items(),
+ key=(lambda line: len(
+ (line[1].get("data", {}).get("parents") or [])
+ ))
+ )}
+
+ for ftrack_id, ent_info in ftrack_moved.items():
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not avalon_ent:
+ continue
+
+ new_parent_id = ent_info["changes"]["parent_id"]["new"]
+ old_parent_id = ent_info["changes"]["parent_id"]["old"]
+
+ mongo_id = avalon_ent["_id"]
+ if self.changeability_by_mongo_id[mongo_id]:
+ par_av_ent = self.avalon_ents_by_ftrack_id.get(new_parent_id)
+ if not par_av_ent:
+ # TODO logging
+ # TODO report
+ ent_path_items = [self.cur_project["full_name"]]
+ ent_path_items.extend(avalon_ent["data"]["parents"])
+ ent_path_items.append(avalon_ent["name"])
+ ent_path = "/".join(ent_path_items)
+
+ error_msg = (
+ "New parent of entity is not synchronized to avalon"
+ )
+ report_msg = (
+ "{}||Parent in Avalon can't be changed. That"
+ " may cause issues. Please fix parent or move entity"
+ " under valid entity."
+ ).format(error_msg)
+
+ self.report_items["warning"][report_msg].append(ent_path)
+ self.log.warning("{} <{}>".format(error_msg, ent_path))
+ continue
+
+ # THIS MUST HAPPEND AFTER CREATING NEW ENTITIES !!!!
+ # - because may be moved to new created entity
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+
+ vis_par_id = None
+ if par_av_ent["type"].lower() != "project":
+ vis_par_id = par_av_ent["_id"]
+ self.updates[mongo_id]["data"]["visualParent"] = vis_par_id
+ self.moved_in_avalon.append(mongo_id)
+
+ # TODO logging
+ ent_path_items = [self.cur_project["full_name"]]
+ ent_path_items.extend(par_av_ent["data"]["parents"])
+ ent_path_items.append(par_av_ent["name"])
+ ent_path_items.append(avalon_ent["name"])
+ ent_path = "/".join(ent_path_items)
+ self.log.debug((
+ "Parent of entity ({}) was changed in avalon <{}>"
+ ).format(str(mongo_id), ent_path)
+ )
+
+ else:
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ avalon_parent_id = avalon_ent["data"]["visualParent"]
+ if avalon_parent_id is None:
+ avalon_parent_id = avalon_ent["parent"]
+
+ avalon_parent = self.avalon_ents_by_id[avalon_parent_id]
+ parent_id = avalon_parent["data"]["ftrackId"]
+
+ # For cases when parent was deleted at the same time
+ if parent_id in self.ftrack_recreated_mapping:
+ parent_id = (
+ self.ftrack_recreated_mapping[parent_id]
+ )
+
+ ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id)
+ if not ftrack_ent:
+ ftrack_ent = self.process_session.query(
+ self.entities_query_by_id.format(
+ self.cur_project["id"], ftrack_id
+ )
+ ).one()
+ self.ftrack_ents_by_id[ftrack_id] = ftrack_ent
+
+ if parent_id == ftrack_ent["parent_id"]:
+ continue
+
+ ftrack_ent["parent_id"] = parent_id
+ try:
+ self.process_session.commit()
+ # TODO logging
+ # TODO report
+ msg = "Entity was moved back"
+ report_msg = (
+ "{}||Entity can't be moved when"
+ " it or its children contain published data"
+ ).format(msg)
+ ent_path = self.get_ent_path(ftrack_id)
+ self.report_items["info"][report_msg].append(ent_path)
+ self.log.warning("{} <{}>".format(msg, ent_path))
+
+ except Exception:
+ self.process_session.rollback()
+ # TODO logging
+ # TODO report
+ error_msg = (
+ "Couldn't moved the entity back to its original parent"
+ )
+ report_msg = (
+ "{}||Moved back because it is not possible to"
+ " move with an entity or it's parents, "
+ " if it already contained published data."
+ ).format(error_msg)
+ error_traceback = (
+                        str(traceback.format_exc()).replace("\n", "<br>")
+                    ).replace(" ", "&nbsp;")
+
+                    item_msg = ent_path + "<br>" + error_traceback
+ self.report_items["warning"][report_msg].append(item_msg)
+ self.log.warning(
+ "{}: \"{}\"".format(error_msg, ent_path),
+ exc_info=True
+ )
+
+ def process_updated(self):
+ # Only custom attributes changes should get here
+ if not self.ftrack_updated:
+ return
+
+ ent_infos = self.ftrack_updated
+ ftrack_mongo_mapping = {}
+ not_found_ids = []
+ for ftrack_id, ent_info in ent_infos.items():
+ avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not avalon_ent:
+ not_found_ids.append(ftrack_id)
+ continue
+
+ ftrack_mongo_mapping[ftrack_id] = avalon_ent["_id"]
+
+ for ftrack_id in not_found_ids:
+ ent_infos.pop(ftrack_id)
+
+ if not ent_infos:
+ return
+
+ cust_attrs, hier_attrs = self.avalon_cust_attrs
+ cust_attrs_by_obj_id = collections.defaultdict(dict)
+ for cust_attr in cust_attrs:
+ key = cust_attr["key"]
+ if key.startswith("avalon_"):
+ continue
+
+ ca_ent_type = cust_attr["entity_type"]
+
+ if ca_ent_type == "show":
+ cust_attrs_by_obj_id[ca_ent_type][key] = cust_attr
+ else:
+ obj_id = cust_attr["object_type_id"]
+ cust_attrs_by_obj_id[obj_id][key] = cust_attr
+
+ hier_attrs_keys = [attr["key"] for attr in hier_attrs]
+
+ for ftrack_id, ent_info in ent_infos.items():
+ mongo_id = ftrack_mongo_mapping[ftrack_id]
+ entType = ent_info["entityType"]
+ ent_path = self.get_ent_path(ftrack_id)
+ if entType == "show":
+ ent_cust_attrs = cust_attrs_by_obj_id.get("show")
+ else:
+ obj_type_id = ent_info["objectTypeId"]
+ ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id)
+
+ for key, values in ent_info["changes"].items():
+ if key in hier_attrs_keys:
+ self.hier_cust_attrs_changes[key].append(ftrack_id)
+ continue
+
+ if key not in ent_cust_attrs:
+ continue
+
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+ value = values["new"]
+ self.updates[mongo_id]["data"][key] = value
+ self.log.debug(
+ "Setting data value of \"{}\" to \"{}\" <{}>".format(
+ key, value, ent_path
+ )
+ )
+
+ if entType != "show" or key != "applications":
+ continue
+
+ # Store apps to project't config
+ apps_str = ent_info["changes"]["applications"]["new"]
+ cust_attr_apps = [app for app in apps_str.split(", ") if app]
+
+ proj_apps, warnings = (
+ avalon_sync.get_project_apps(cust_attr_apps)
+ )
+ if "config" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["config"] = {}
+ self.updates[mongo_id]["config"]["apps"] = proj_apps
+
+ for msg, items in warnings.items():
+ if not msg or not items:
+ continue
+ self.report_items["warning"][msg] = items
+
+ def process_hier_cleanup(self):
+ if (
+ not self.moved_in_avalon and
+ not self.renamed_in_avalon and
+ not self.hier_cust_attrs_changes and
+ not self.task_changes_by_avalon_id
):
return
- # check if project have Custom Attribute 'avalon_mongo_id'
- if ca_mongoid not in ft_project['custom_attributes']:
- message = (
- "Custom attribute '{}' for 'Project' is not created"
- " or don't have set permissions for API"
- ).format(ca_mongoid)
- self.log.warning(message)
- self.show_message(event, message, False)
- return
+ parent_changes = []
+ hier_cust_attrs_ids = []
+ hier_cust_attrs_keys = []
+ all_keys = False
+ for mongo_id in self.moved_in_avalon:
+ parent_changes.append(mongo_id)
+ hier_cust_attrs_ids.append(mongo_id)
+ all_keys = True
- # get avalon project if possible
- import_entities = []
+ for mongo_id in self.renamed_in_avalon:
+ if mongo_id not in parent_changes:
+ parent_changes.append(mongo_id)
- custom_attributes = lib.get_avalon_attr(session)
-
- avalon_project = lib.get_avalon_project(ft_project)
- if avalon_project is None:
- import_entities.append(ft_project)
-
- for entity in entities:
- if entity.entity_type.lower() in ['task']:
- entity = entity['parent']
-
- if 'custom_attributes' not in entity:
+ for key, ftrack_ids in self.hier_cust_attrs_changes.items():
+ if key.startswith("avalon_"):
continue
- if ca_mongoid not in entity['custom_attributes']:
+ for ftrack_id in ftrack_ids:
+ avalon_ent = self.avalon_ents_by_ftrack_id[ftrack_id]
+ mongo_id = avalon_ent["_id"]
+ if mongo_id in hier_cust_attrs_ids:
+ continue
+ hier_cust_attrs_ids.append(mongo_id)
+ if not all_keys and key not in hier_cust_attrs_keys:
+ hier_cust_attrs_keys.append(key)
- message = (
- "Custom attribute '{}' for '{}' is not created"
- " or don't have set permissions for API"
- ).format(ca_mongoid, entity.entity_type)
+ # Tasks preparation ****
+ for mongo_id, tasks in self.task_changes_by_avalon_id.items():
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
- self.log.warning(message)
- self.show_message(event, message, False)
- return
+ self.updates[mongo_id]["data"]["tasks"] = tasks
- if entity not in import_entities:
- import_entities.append(entity)
+ # Parents preparation ***
+ mongo_to_ftrack_parents = {}
+ missing_ftrack_ents = {}
+ for mongo_id in parent_changes:
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ ftrack_id = avalon_ent["data"]["ftrackId"]
+ if ftrack_id not in self.ftrack_ents_by_id:
+ missing_ftrack_ents[ftrack_id] = mongo_id
+ continue
+ ftrack_ent = self.ftrack_ents_by_id[ftrack_id]
+ mongo_to_ftrack_parents[mongo_id] = len(ftrack_ent["link"])
- if len(import_entities) < 1:
+ if missing_ftrack_ents:
+ joine_ids = ", ".join(
+ ["\"{}\"".format(id) for id in missing_ftrack_ents.keys()]
+ )
+ entities = self.process_session.query(
+ self.entities_query_by_id.format(
+ self.cur_project["id"], joine_ids
+ )
+ ).all()
+ for entity in entities:
+ ftrack_id = entity["id"]
+ self.ftrack_ents_by_id[ftrack_id] = entity
+ mongo_id = missing_ftrack_ents[ftrack_id]
+ mongo_to_ftrack_parents[mongo_id] = len(entity["link"])
+
+ stored_parents_by_mongo = {}
+ # sort by hierarchy level
+ mongo_to_ftrack_parents = [k for k, v in sorted(
+ mongo_to_ftrack_parents.items(),
+ key=(lambda item: item[1])
+ )]
+ self.log.debug(
+ "Updating parents and hieararchy because of name/parenting changes"
+ )
+ for mongo_id in mongo_to_ftrack_parents:
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ vis_par = avalon_ent["data"]["visualParent"]
+ if vis_par in stored_parents_by_mongo:
+ parents = [par for par in stored_parents_by_mongo[vis_par]]
+ if vis_par is not None:
+ parent_ent = self.avalon_ents_by_id[vis_par]
+ parents.append(parent_ent["name"])
+ stored_parents_by_mongo[mongo_id] = parents
+ continue
+
+ ftrack_id = avalon_ent["data"]["ftrackId"]
+ ftrack_ent = self.ftrack_ents_by_id[ftrack_id]
+ ent_path_items = [ent["name"] for ent in ftrack_ent["link"]]
+ parents = ent_path_items[1:len(ent_path_items)-1:]
+ stored_parents_by_mongo[mongo_id] = parents
+
+ for mongo_id, parents in stored_parents_by_mongo.items():
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ cur_par = avalon_ent["data"]["parents"]
+ if cur_par == parents:
+ continue
+
+ hierarchy = ""
+ if len(parents) > 0:
+ hierarchy = os.path.sep.join(parents)
+
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+ self.updates[mongo_id]["data"]["parents"] = parents
+ self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
+
+ # Skip custom attributes if didn't change
+ if not hier_cust_attrs_ids:
+ # TODO logging
+ self.log.debug(
+ "Hierarchical attributes were not changed. Skipping"
+ )
+ self.update_entities()
return
- try:
- for entity in import_entities:
- result = lib.import_to_avalon(
- session=session,
- entity=entity,
- ft_project=ft_project,
- av_project=avalon_project,
- custom_attributes=custom_attributes
+ cust_attrs, hier_attrs = self.avalon_cust_attrs
+
+ # Hierarchical custom attributes preparation ***
+ if all_keys:
+ hier_cust_attrs_keys = [
+ attr["key"] for attr in hier_attrs if (
+ not attr["key"].startswith("avalon_")
)
- if 'errors' in result and len(result['errors']) > 0:
- session.commit()
- lib.show_errors(self, event, result['errors'])
-
- return
-
- if avalon_project is None:
- if 'project' in result:
- avalon_project = result['project']
-
- except Exception as e:
- # reset session to clear it
- session.rollback()
-
- message = str(e)
- title = 'Hey You! Unknown Error has been raised! (*look below*)'
- ftrack_message = (
- 'SyncToAvalon event ended with unexpected error'
- ' please check log file or contact Administrator'
- ' for more information.'
- )
- items = [
- {'type': 'label', 'value': '# Fatal Error'},
- {'type': 'label', 'value': '{}<br>'.format(ftrack_message)}
]
- self.show_interface(items, title, event=event)
- self.log.error(
- 'Fatal error during sync: {}'.format(message), exc_info=True
+
+ mongo_ftrack_mapping = {}
+ cust_attrs_ftrack_ids = []
+ # ftrack_parenting = collections.defaultdict(list)
+ entities_dict = collections.defaultdict(dict)
+
+ children_queue = queue.Queue()
+ parent_queue = queue.Queue()
+
+ for mongo_id in hier_cust_attrs_ids:
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ parent_queue.put(avalon_ent)
+ ftrack_id = avalon_ent["data"]["ftrackId"]
+ if ftrack_id not in entities_dict:
+ entities_dict[ftrack_id] = {
+ "children": [],
+ "parent_id": None,
+ "hier_attrs": {}
+ }
+
+ mongo_ftrack_mapping[mongo_id] = ftrack_id
+ cust_attrs_ftrack_ids.append(ftrack_id)
+ children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or []
+ for children_ent in children_ents:
+ _ftrack_id = children_ent["data"]["ftrackId"]
+ if _ftrack_id in entities_dict:
+ continue
+
+ entities_dict[_ftrack_id] = {
+ "children": [],
+ "parent_id": None,
+ "hier_attrs": {}
+ }
+ # if _ftrack_id not in ftrack_parenting[ftrack_id]:
+ # ftrack_parenting[ftrack_id].append(_ftrack_id)
+ entities_dict[_ftrack_id]["parent_id"] = ftrack_id
+ if _ftrack_id not in entities_dict[ftrack_id]["children"]:
+ entities_dict[ftrack_id]["children"].append(_ftrack_id)
+ children_queue.put(children_ent)
+
+ while not children_queue.empty():
+ avalon_ent = children_queue.get()
+ mongo_id = avalon_ent["_id"]
+ ftrack_id = avalon_ent["data"]["ftrackId"]
+ if ftrack_id in cust_attrs_ftrack_ids:
+ continue
+
+ mongo_ftrack_mapping[mongo_id] = ftrack_id
+ cust_attrs_ftrack_ids.append(ftrack_id)
+
+ children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or []
+ for children_ent in children_ents:
+ _ftrack_id = children_ent["data"]["ftrackId"]
+ if _ftrack_id in entities_dict:
+ continue
+
+ entities_dict[_ftrack_id] = {
+ "children": [],
+ "parent_id": None,
+ "hier_attrs": {}
+ }
+ entities_dict[_ftrack_id]["parent_id"] = ftrack_id
+ if _ftrack_id not in entities_dict[ftrack_id]["children"]:
+ entities_dict[ftrack_id]["children"].append(_ftrack_id)
+ children_queue.put(children_ent)
+
+ while not parent_queue.empty():
+ avalon_ent = parent_queue.get()
+ if avalon_ent["type"].lower() == "project":
+ continue
+
+ ftrack_id = avalon_ent["data"]["ftrackId"]
+
+ vis_par = avalon_ent["data"]["visualParent"]
+ if vis_par is None:
+ vis_par = avalon_ent["parent"]
+
+ parent_ent = self.avalon_ents_by_id[vis_par]
+ parent_ftrack_id = parent_ent["data"]["ftrackId"]
+ if parent_ftrack_id not in entities_dict:
+ entities_dict[parent_ftrack_id] = {
+ "children": [],
+ "parent_id": None,
+ "hier_attrs": {}
+ }
+
+ if ftrack_id not in entities_dict[parent_ftrack_id]["children"]:
+ entities_dict[parent_ftrack_id]["children"].append(ftrack_id)
+
+ entities_dict[ftrack_id]["parent_id"] = parent_ftrack_id
+
+ if parent_ftrack_id in cust_attrs_ftrack_ids:
+ continue
+ mongo_ftrack_mapping[vis_par] = parent_ftrack_id
+ cust_attrs_ftrack_ids.append(parent_ftrack_id)
+ # if ftrack_id not in ftrack_parenting[parent_ftrack_id]:
+ # ftrack_parenting[parent_ftrack_id].append(ftrack_id)
+
+ parent_queue.put(parent_ent)
+
+ # Prepare values to query
+ entity_ids_joined = ", ".join([
+ "\"{}\"".format(id) for id in cust_attrs_ftrack_ids
+ ])
+ attributes_joined = ", ".join([
+ "\"{}\"".format(name) for name in hier_cust_attrs_keys
+ ])
+
+ queries = [{
+ "action": "query",
+ "expression": (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ ).format(entity_ids_joined, attributes_joined)
+ }]
+
+ if hasattr(self.process_session, "call"):
+ [values] = self.process_session.call(queries)
+ else:
+ [values] = self.process_session._call(queries)
+
+ ftrack_project_id = self.cur_project["id"]
+
+ for attr in hier_attrs:
+ key = attr["key"]
+ if key not in hier_cust_attrs_keys:
+ continue
+ entities_dict[ftrack_project_id]["hier_attrs"][key] = (
+ attr["default"]
)
- return
+ # PREPARE DATA BEFORE THIS
+ avalon_hier = []
+ for value in values["data"]:
+ if value["value"] is None:
+ continue
+ entity_id = value["entity_id"]
+ key = value["configuration"]["key"]
+ entities_dict[entity_id]["hier_attrs"][key] = value["value"]
+
+ # Get dictionary with not None hierarchical values to pull to childs
+ project_values = {}
+ for key, value in (
+ entities_dict[ftrack_project_id]["hier_attrs"].items()
+ ):
+ if value is not None:
+ project_values[key] = value
+
+ for key in avalon_hier:
+ value = entities_dict[ftrack_project_id]["avalon_attrs"][key]
+ if value is not None:
+ project_values[key] = value
+
+ hier_down_queue = queue.Queue()
+ hier_down_queue.put((project_values, ftrack_project_id))
+
+ while not hier_down_queue.empty():
+ hier_values, parent_id = hier_down_queue.get()
+ for child_id in entities_dict[parent_id]["children"]:
+ _hier_values = hier_values.copy()
+ for name in hier_cust_attrs_keys:
+ value = entities_dict[child_id]["hier_attrs"].get(name)
+ if value is not None:
+ _hier_values[name] = value
+
+ entities_dict[child_id]["hier_attrs"].update(_hier_values)
+ hier_down_queue.put((_hier_values, child_id))
+
+ ftrack_mongo_mapping = {}
+ for mongo_id, ftrack_id in mongo_ftrack_mapping.items():
+ ftrack_mongo_mapping[ftrack_id] = mongo_id
+
+ for ftrack_id, data in entities_dict.items():
+ mongo_id = ftrack_mongo_mapping[ftrack_id]
+ avalon_ent = self.avalon_ents_by_id[mongo_id]
+ ent_path = self.get_ent_path(ftrack_id)
+ # TODO logging
+ self.log.debug(
+ "Updating hierarchical attributes <{}>".format(ent_path)
+ )
+ for key, value in data["hier_attrs"].items():
+ if (
+ key in avalon_ent["data"] and
+ avalon_ent["data"][key] == value
+ ):
+ continue
+
+ self.log.debug("- {}: {}".format(key, value))
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+
+ self.updates[mongo_id]["data"][key] = value
+
+ self.update_entities()
+
+ def update_entities(self):
+ mongo_changes_bulk = []
+ for mongo_id, changes in self.updates.items():
+ filter = {"_id": mongo_id}
+ change_data = avalon_sync.from_dict_to_set(changes)
+ mongo_changes_bulk.append(UpdateOne(filter, change_data))
+
+ if not mongo_changes_bulk:
+ return
+
+ self.dbcon.bulk_write(mongo_changes_bulk)
+ self.updates = collections.defaultdict(dict)
+
+ @property
+ def duplicated_report(self):
+ if not self.duplicated:
+ return []
+
+ ft_project = self.cur_project
+ duplicated_names = []
+ for ftrack_id in self.duplicated:
+ ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id)
+ if not ftrack_ent:
+ ftrack_ent = self.process_session.query(
+ self.entities_query_by_id.format(
+ ft_project["id"], ftrack_id
+ )
+ ).one()
+ self.ftrack_ents_by_id[ftrack_id] = ftrack_ent
+ name = ftrack_ent["name"]
+ if name not in duplicated_names:
+ duplicated_names.append(name)
+
+ joined_names = ", ".join(
+ ["\"{}\"".format(name) for name in duplicated_names]
+ )
+ ft_ents = self.process_session.query(
+ self.entities_name_query_by_name.format(
+ ft_project["id"], joined_names
+ )
+ ).all()
+
+ ft_ents_by_name = collections.defaultdict(list)
+ for ft_ent in ft_ents:
+ name = ft_ent["name"]
+ ft_ents_by_name[name].append(ft_ent)
+
+ if not ft_ents_by_name:
+ return []
+
+ subtitle = "Duplicated entity names:"
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: It is not allowed to use the same name"
+ " for multiple entities in the same project<br>"
+ )
+ })
+
+ for name, ents in ft_ents_by_name.items():
+ items.append({
+ "type": "label",
+ "value": "## {}".format(name)
+ })
+ paths = []
+ for ent in ents:
+ ftrack_id = ent["id"]
+ ent_path = "/".join([_ent["name"] for _ent in ent["link"]])
+ avalon_ent = self.avalon_ents_by_id.get(ftrack_id)
+
+ if avalon_ent:
+ additional = " (synchronized)"
+ if avalon_ent["name"] != name:
+ additional = " (synchronized as {})".format(
+ avalon_ent["name"]
+ )
+ ent_path += additional
+ paths.append(ent_path)
+
+ items.append({
+ "type": "label",
+ "value": '{}<br>'.format("<br>".join(paths))
+ })
+
+ return items
+
+ @property
+ def regex_report(self):
+ if not self.regex_failed:
+ return []
+
+ subtitle = "Entity names contain prohibited symbols:"
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: You can use Letters( a-Z ),"
+ " Numbers( 0-9 ) and Underscore( _ )<br>"
+ )
+ })
+
+ ft_project = self.cur_project
+ for ftrack_id in self.regex_failed:
+ ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id)
+ if not ftrack_ent:
+ ftrack_ent = self.process_session.query(
+ self.entities_query_by_id.format(
+ ft_project["id"], ftrack_id
+ )
+ ).one()
+ self.ftrack_ents_by_id[ftrack_id] = ftrack_ent
+
+ name = ftrack_ent["name"]
+ ent_path_items = [_ent["name"] for _ent in ftrack_ent["link"][:-1]]
+ ent_path_items.append("{}".format(name))
+ ent_path = "/".join(ent_path_items)
+ items.append({
+ "type": "label",
+ "value": "{} - {}<br>".format(name, ent_path)
+ })
+
+ return items
+
+ def report(self):
+ msg_len = len(self.duplicated) + len(self.regex_failed)
+ for msgs in self.report_items.values():
+ msg_len += len(msgs)
+
+ if msg_len == 0:
+ return
+
+ items = []
+ project_name = self.cur_project["full_name"]
+ title = "Synchronization report ({}):".format(project_name)
+
+ keys = ["error", "warning", "info"]
+ for key in keys:
+ subitems = []
+ if key == "warning":
+ subitems.extend(self.duplicated_report)
+ subitems.extend(self.regex_report)
+
+ for _msg, _items in self.report_items[key].items():
+ if not _items:
+ continue
+
+ msg_items = _msg.split("||")
+ msg = msg_items[0]
+ subitems.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+
+ if len(msg_items) > 1:
+ for note in msg_items[1:]:
+ subitems.append({
+ "type": "label",
+ "value": "NOTE: {}<br>".format(note)
+ })
+
+ if isinstance(_items, str):
+ _items = [_items]
+ subitems.append({
+ "type": "label",
+ "value": '{}<br>'.format("<br>".join(_items))
+ })
+
+ if items and subitems:
+ items.append(self.report_splitter)
+
+ items.extend(subitems)
+
+ self.show_interface(
+ items=items,
+ title=title,
+ event=self._cur_event
+ )
+ return True
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
- SyncToAvalon(session, plugins_presets).register()
+ SyncToAvalonEvent(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_thumbnail_updates.py b/pype/ftrack/events/event_thumbnail_updates.py
index 47909da055..5421aa7543 100644
--- a/pype/ftrack/events/event_thumbnail_updates.py
+++ b/pype/ftrack/events/event_thumbnail_updates.py
@@ -1,4 +1,3 @@
-import ftrack_api
from pype.ftrack import BaseEvent
@@ -26,28 +25,34 @@ class ThumbnailEvents(BaseEvent):
# Update task thumbnail from published version
# if (entity['entityType'] == 'assetversion' and
# entity['action'] == 'encoded'):
- if (
- entity['entityType'] == 'assetversion'
- and 'thumbid' in (entity.get('keys') or [])
+ elif (
+ entity['entityType'] == 'assetversion' and
+ entity['action'] != 'remove' and
+ 'thumbid' in (entity.get('keys') or [])
):
version = session.get('AssetVersion', entity['entityId'])
+ if not version:
+ continue
+
thumbnail = version.get('thumbnail')
- if thumbnail:
- parent = version['asset']['parent']
- task = version['task']
- parent['thumbnail_id'] = version['thumbnail_id']
- if parent.entity_type.lower() == "project":
- name = parent["full_name"]
- else:
- name = parent["name"]
- msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
+ if not thumbnail:
+ continue
- if task:
- task['thumbnail_id'] = version['thumbnail_id']
- msg += " and task [ {} ]".format(task["name"])
+ parent = version['asset']['parent']
+ task = version['task']
+ parent['thumbnail_id'] = version['thumbnail_id']
+ if parent.entity_type.lower() == "project":
+ name = parent["full_name"]
+ else:
+ name = parent["name"]
+ msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
- self.log.info(msg)
+ if task:
+ task['thumbnail_id'] = version['thumbnail_id']
+ msg += " and task [ {} ]".format(task["name"])
+
+ self.log.info(msg)
try:
session.commit()
@@ -57,5 +62,4 @@ class ThumbnailEvents(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
-
ThumbnailEvents(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
index fe15eb1e20..87994d34b2 100644
--- a/pype/ftrack/events/event_user_assigment.py
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -1,12 +1,15 @@
-import ftrack_api
-from pype.ftrack import BaseEvent, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-from pypeapp import config
-from pypeapp import Anatomy
-import subprocess
import os
import re
+import subprocess
+
+from pype.ftrack import BaseEvent
+from pype.ftrack.lib.avalon_sync import CustAttrIdKey
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
+from bson.objectid import ObjectId
+
+from pypeapp import config
+from pypeapp import Anatomy
class UserAssigmentEvent(BaseEvent):
@@ -36,7 +39,6 @@ class UserAssigmentEvent(BaseEvent):
"""
db_con = DbConnector()
- ca_mongoid = lib.get_ca_mongoid()
def error(self, *err):
for e in err:
@@ -105,7 +107,7 @@ class UserAssigmentEvent(BaseEvent):
self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
avalon_entity = None
- parent_id = parent['custom_attributes'].get(self.ca_mongoid)
+ parent_id = parent['custom_attributes'].get(CustAttrIdKey)
if parent_id:
parent_id = ObjectId(parent_id)
avalon_entity = self.db_con.find_one({
diff --git a/pype/ftrack/ftrack_server/session_processor.py b/pype/ftrack/ftrack_server/session_processor.py
index 86a9775dce..133719bab4 100644
--- a/pype/ftrack/ftrack_server/session_processor.py
+++ b/pype/ftrack/ftrack_server/session_processor.py
@@ -41,7 +41,7 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub):
def prepare_dbcon(self):
try:
self.dbcon.install()
- self.dbcon._database.collection_names()
+ self.dbcon._database.list_collection_names()
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
diff --git a/pype/ftrack/ftrack_server/session_storer.py b/pype/ftrack/ftrack_server/session_storer.py
index b3201c9e4d..0b44d7d3a1 100644
--- a/pype/ftrack/ftrack_server/session_storer.py
+++ b/pype/ftrack/ftrack_server/session_storer.py
@@ -21,11 +21,23 @@ class StorerEventHub(ftrack_api.event.hub.EventHub):
def _handle_packet(self, code, packet_identifier, path, data):
"""Override `_handle_packet` which extend heartbeat"""
- if self._code_name_mapping[code] == "heartbeat":
+ code_name = self._code_name_mapping[code]
+ if code_name == "heartbeat":
# Reply with heartbeat.
self.sock.sendall(b"storer")
return self._send_packet(self._code_name_mapping['heartbeat'])
+ elif code_name == "connect":
+ event = ftrack_api.event.base.Event(
+ topic="pype.storer.started",
+ data={},
+ source={
+ "id": self.id,
+ "user": {"username": self._api_user}
+ }
+ )
+ self._event_queue.put(event)
+
return super(StorerEventHub, self)._handle_packet(
code, packet_identifier, path, data
)
diff --git a/pype/ftrack/ftrack_server/sub_event_storer.py b/pype/ftrack/ftrack_server/sub_event_storer.py
index aaaf63accd..4828b10bfa 100644
--- a/pype/ftrack/ftrack_server/sub_event_storer.py
+++ b/pype/ftrack/ftrack_server/sub_event_storer.py
@@ -5,6 +5,7 @@ import signal
import socket
import pymongo
+import ftrack_api
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pype.ftrack.lib.custom_db_connector import DbConnector
@@ -15,6 +16,13 @@ log = Logger().get_logger("Event storer")
url, database, table_name = get_ftrack_event_mongo_info()
+
+class SessionClass:
+ def __init__(self):
+ self.session = None
+
+
+session_obj = SessionClass()
dbcon = DbConnector(
mongo_url=url,
database_name=database,
@@ -24,10 +32,11 @@ dbcon = DbConnector(
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []
+
def install_db():
try:
dbcon.install()
- dbcon._database.collection_names()
+ dbcon._database.list_collection_names()
except pymongo.errors.AutoReconnect:
log.error("Mongo server \"{}\" is not responding, exiting.".format(
os.environ["AVALON_MONGO"]
@@ -49,7 +58,7 @@ def launch(event):
try:
# dbcon.insert_one(event_data)
- dbcon.update({"id": event_id}, event_data, upsert=True)
+ dbcon.replace_one({"id": event_id}, event_data, upsert=True)
log.debug("Event: {} stored".format(event_id))
except pymongo.errors.AutoReconnect:
@@ -65,10 +74,71 @@ def launch(event):
)
+def trigger_sync(event):
+ session = session_obj.session
+ if session is None:
+ log.warning("Session is not set. Can't trigger Sync to avalon action.")
+ return True
+
+ projects = session.query("Project").all()
+ if not projects:
+ return True
+
+ query = {
+ "pype_data.is_processed": False,
+ "topic": "ftrack.action.launch",
+ "data.actionIdentifier": "sync.to.avalon.server"
+ }
+ set_dict = {
+ "$set": {"pype_data.is_processed": True}
+ }
+ dbcon.update_many(query, set_dict)
+
+ selections = []
+ for project in projects:
+ if project["status"] != "active":
+ continue
+
+ auto_sync = project["custom_attributes"].get("avalon_auto_sync")
+ if not auto_sync:
+ continue
+
+ selections.append({
+ "entityId": project["id"],
+ "entityType": "show"
+ })
+
+ if not selections:
+ return
+
+ user = session.query(
+ "User where username is \"{}\"".format(session.api_user)
+ ).one()
+ user_data = {
+ "username": user["username"],
+ "id": user["id"]
+ }
+
+ for selection in selections:
+ event_data = {
+ "actionIdentifier": "sync.to.avalon.server",
+ "selection": [selection]
+ }
+ session.event_hub.publish(
+ ftrack_api.event.base.Event(
+ topic="ftrack.action.launch",
+ data=event_data,
+ source=dict(user=user_data)
+ ),
+ on_error="ignore"
+ )
+
+
def register(session):
'''Registers the event, subscribing the discover and launch topics.'''
install_db()
session.event_hub.subscribe("topic=*", launch)
+ session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)
def main(args):
@@ -85,6 +155,7 @@ def main(args):
try:
session = StorerSession(auto_connect_event_hub=True, sock=sock)
+ session_obj.session = session
register(session)
server = FtrackServer("event")
log.debug("Launched Ftrack Event storer")
diff --git a/pype/ftrack/lib/__init__.py b/pype/ftrack/lib/__init__.py
index 6198230e57..9af9ded943 100644
--- a/pype/ftrack/lib/__init__.py
+++ b/pype/ftrack/lib/__init__.py
@@ -1,4 +1,4 @@
-from .avalon_sync import *
+from . import avalon_sync
from .credentials import *
from .ftrack_app_handler import *
from .ftrack_event_handler import *
diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py
index 0baf99d2cf..064ea1adb8 100644
--- a/pype/ftrack/lib/avalon_sync.py
+++ b/pype/ftrack/lib/avalon_sync.py
@@ -1,369 +1,82 @@
import os
import re
-import json
-from pype.lib import get_avalon_database
-from bson.objectid import ObjectId
+import queue
+import collections
+
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+
import avalon
import avalon.api
-from avalon import schema
-from avalon.vendor import toml, jsonschema
-from pypeapp import Logger, Anatomy, config
+from avalon.vendor import toml
+from pypeapp import Logger, Anatomy
+
+from bson.objectid import ObjectId
+from bson.errors import InvalidId
+from pymongo import UpdateOne
+import ftrack_api
-ValidationError = jsonschema.ValidationError
log = Logger().get_logger(__name__)
-def get_ca_mongoid():
- # returns name of Custom attribute that stores mongo_id
- return 'avalon_mongo_id'
+# Current schemas for avalon types
+EntitySchemas = {
+ "project": "avalon-core:project-2.0",
+ "asset": "avalon-core:asset-3.0",
+ "config": "avalon-core:config-1.0"
+}
+
+# name of Custom attribute that stores mongo_id from avalon db
+CustAttrIdKey = "avalon_mongo_id"
+CustAttrAutoSync = "avalon_auto_sync"
-def import_to_avalon(
- session, entity, ft_project, av_project, custom_attributes
-):
- database = get_avalon_database()
- project_name = ft_project['full_name']
- output = {}
- errors = []
+def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
+ schema_name = "asset-3.0"
+ if in_schema:
+ schema_name = in_schema
+ elif entity_type == "project":
+ schema_name = "project-2.0"
+ elif entity_type == "task":
+ schema_name = "task"
- entity_type = entity.entity_type
- ent_path = "/".join([ent["name"] for ent in entity['link']])
-
- log.debug("{} [{}] - Processing".format(ent_path, entity_type))
-
- ca_mongoid = get_ca_mongoid()
- # Validate if entity has custom attribute avalon_mongo_id
- if ca_mongoid not in entity['custom_attributes']:
- msg = (
- 'Custom attribute "{}" for "{}" is not created'
- ' or don\'t have set permissions for API'
- ).format(ca_mongoid, entity['name'])
- log.error(msg)
- errors.append({'Custom attribute error': msg})
- output['errors'] = errors
- return output
-
- # Validate if entity name match REGEX in schema
- avalon_check_name(entity)
-
- entity_type = entity.entity_type
- # Project ////////////////////////////////////////////////////////////////
- if entity_type in ['Project']:
- type = 'project'
-
- proj_config = get_project_config(entity)
- schema.validate(proj_config)
-
- av_project_code = None
- if av_project is not None and 'code' in av_project['data']:
- av_project_code = av_project['data']['code']
- ft_project_code = ft_project['name']
-
- if av_project is None:
- log.debug("{} - Creating project".format(project_name))
- item = {
- 'schema': "avalon-core:project-2.0",
- 'type': type,
- 'name': project_name,
- 'data': dict(),
- 'config': proj_config,
- 'parent': None,
- }
- schema.validate(item)
-
- database[project_name].insert_one(item)
-
- av_project = database[project_name].find_one(
- {'type': type}
- )
-
- elif (
- av_project['name'] != project_name or
- (
- av_project_code is not None and
- av_project_code != ft_project_code
- )
- ):
- msg = (
- 'You can\'t change {0} "{1}" to "{2}"'
- ', avalon wouldn\'t work properly!'
- '\n{0} was changed back!'
- )
- if av_project['name'] != project_name:
- entity['full_name'] = av_project['name']
- errors.append(
- {'Changed name error': msg.format(
- 'Project name', av_project['name'], project_name
- )}
- )
-
- if (
- av_project_code is not None and
- av_project_code != ft_project_code
- ):
- log.warning((
- "{0} - Project code"
- " is different in Avalon (\"{1}\")"
- " that in Ftrack (\"{2}\")!"
- " Trying to change it back in Ftrack to \"{1}\"."
- ).format(
- ent_path, str(av_project_code), str(ft_project_code)
- ))
-
- entity['name'] = av_project_code
- errors.append(
- {'Changed name error': msg.format(
- 'Project code', av_project_code, ft_project_code
- )}
- )
-
- try:
- session.commit()
- log.info((
- "{} - Project code was changed back to \"{}\""
- ).format(ent_path, str(av_project_code)))
- except Exception:
- log.error(
- (
- "{} - Couldn't change project code back to \"{}\"."
- ).format(ent_path, str(av_project_code)),
- exc_info=True
- )
-
- output['errors'] = errors
- return output
+ name_pattern = None
+ if schema_patterns is not None:
+ name_pattern = schema_patterns.get(schema_name)
+ if not name_pattern:
+ default_pattern = "^[a-zA-Z0-9_.]*$"
+ schema_obj = avalon.schema._cache.get(schema_name + ".json")
+ if not schema_obj:
+ name_pattern = default_pattern
else:
- # not override existing templates!
- templates = av_project['config'].get('template', None)
- if templates is not None:
- for key, value in proj_config['template'].items():
- if (
- key in templates and
- templates[key] is not None and
- templates[key] != value
- ):
- proj_config['template'][key] = templates[key]
-
- projectId = av_project['_id']
-
- data = get_data(
- entity, session, custom_attributes
- )
-
- cur_data = av_project.get('data') or {}
-
- enter_data = {}
- for k, v in cur_data.items():
- enter_data[k] = v
- for k, v in data.items():
- enter_data[k] = v
-
- log.debug("{} - Updating data".format(ent_path))
- database[project_name].update_many(
- {'_id': ObjectId(projectId)},
- {'$set': {
- 'name': project_name,
- 'config': proj_config,
- 'data': data
- }}
- )
-
- entity['custom_attributes'][ca_mongoid] = str(projectId)
- session.commit()
-
- output['project'] = av_project
-
- return output
-
- # Asset - /////////////////////////////////////////////////////////////
- if av_project is None:
- result = import_to_avalon(
- session, ft_project, ft_project, av_project, custom_attributes
- )
-
- if 'errors' in result:
- output['errors'] = result['errors']
- return output
-
- elif 'project' not in result:
- msg = 'During project import went something wrong'
- errors.append({'Unexpected error': msg})
- output['errors'] = errors
- return output
-
- av_project = result['project']
- output['project'] = result['project']
-
- projectId = av_project['_id']
- data = get_data(
- entity, session, custom_attributes
- )
-
- name = entity['name']
-
- avalon_asset = None
- # existence of this custom attr is already checked
- if ca_mongoid not in entity['custom_attributes']:
- msg = (
- "Entity type \"{}\" don't have created custom attribute \"{}\""
- " or user \"{}\" don't have permissions to read or change it."
- ).format(entity_type, ca_mongoid, session.api_user)
-
- log.error(msg)
- errors.append({'Missing Custom attribute': msg})
- output['errors'] = errors
- return output
-
- mongo_id = entity['custom_attributes'][ca_mongoid]
- mongo_id = mongo_id.replace(' ', '').replace('\n', '')
- try:
- ObjectId(mongo_id)
- except Exception:
- mongo_id = ''
-
- if mongo_id != '':
- avalon_asset = database[project_name].find_one(
- {'_id': ObjectId(mongo_id)}
- )
-
- if avalon_asset is None:
- avalon_asset = database[project_name].find_one(
- {'type': 'asset', 'name': name}
- )
- if avalon_asset is None:
- item = {
- 'schema': "avalon-core:asset-3.0",
- 'name': name,
- 'parent': ObjectId(projectId),
- 'type': 'asset',
- 'data': data
- }
- schema.validate(item)
- mongo_id = database[project_name].insert_one(item).inserted_id
- log.debug("{} - Created in project \"{}\"".format(
- ent_path, project_name
- ))
- # Raise error if it seems to be different ent. with same name
- elif avalon_asset['data']['parents'] != data['parents']:
- msg = (
- "{} - In Avalon DB already exists entity with name \"{}\""
- "\n- \"{}\""
- ).format(ent_path, name, "/".join(db_asset_path_items))
- log.error(msg)
- errors.append({'Entity name duplication': msg})
- output['errors'] = errors
- return output
-
- # Store new ID (in case that asset was removed from DB)
- else:
- mongo_id = avalon_asset['_id']
- else:
- if avalon_asset['name'] != entity['name']:
- if changeability_check_childs(entity) is False:
- msg = (
- '{} - You can\'t change name "{}" to "{}"'
- ', avalon wouldn\'t work properly!'
- '\n\nName was changed back!'
- '\n\nCreate new entity if you want to change name.'
- ).format(ent_path, avalon_asset['name'], entity['name'])
-
- log.warning(msg)
- entity['name'] = avalon_asset['name']
- session.commit()
- errors.append({'Changed name error': msg})
-
- if avalon_asset['data']['parents'] != data['parents']:
- old_path = '/'.join(avalon_asset['data']['parents'])
- new_path = '/'.join(data['parents'])
-
- msg = (
- 'You can\'t move with entities.'
- '\nEntity "{}" was moved from "{}" to "{}"'
- '\n\nAvalon won\'t work properly, {}!'
+ name_pattern = schema_obj.get(
+ "properties", {}).get(
+ "name", {}).get(
+ "pattern", default_pattern
)
+ if schema_patterns is not None:
+ schema_patterns[schema_name] = name_pattern
- moved_back = False
- if 'visualParent' in avalon_asset['data']:
- asset_parent_id = avalon_asset['data']['visualParent'] or avalon_asset['parent']
-
- asset_parent = database[project_name].find_one(
- {'_id': ObjectId(asset_parent_id)}
- )
- ft_parent_id = asset_parent['data']['ftrackId']
- try:
- entity['parent_id'] = ft_parent_id
- session.commit()
- msg = msg.format(
- avalon_asset['name'], old_path, new_path,
- 'entity was moved back'
- )
- log.warning(msg)
- moved_back = True
-
- except Exception:
- moved_back = False
-
- if moved_back is False:
- msg = msg.format(
- avalon_asset['name'], old_path, new_path,
- 'please move it back'
- )
- log.error(msg)
-
- errors.append({'Hierarchy change error': msg})
-
- if len(errors) > 0:
- output['errors'] = errors
- return output
-
- avalon_asset = database[project_name].find_one(
- {'_id': ObjectId(mongo_id)}
- )
-
- cur_data = avalon_asset.get('data') or {}
-
- enter_data = {}
- for k, v in cur_data.items():
- enter_data[k] = v
- for k, v in data.items():
- enter_data[k] = v
-
- database[project_name].update_many(
- {'_id': ObjectId(mongo_id)},
- {'$set': {
- 'name': name,
- 'data': enter_data,
- 'parent': ObjectId(projectId)
- }})
- log.debug("{} - Updated data (in project \"{}\")".format(
- ent_path, project_name
- ))
- entity['custom_attributes'][ca_mongoid] = str(mongo_id)
- session.commit()
-
- return output
+ if re.match(name_pattern, name):
+ return True
+ return False
-def get_avalon_attr(session, split_hierarchical=False):
+def get_avalon_attr(session, split_hierarchical=True):
custom_attributes = []
hier_custom_attributes = []
cust_attrs_query = (
- "select id, entity_type, object_type_id, is_hierarchical"
+ "select id, entity_type, object_type_id, is_hierarchical, default"
" from CustomAttributeConfiguration"
" where group.name = \"avalon\""
)
all_avalon_attr = session.query(cust_attrs_query).all()
for cust_attr in all_avalon_attr:
- if 'avalon_' in cust_attr['key']:
+ if split_hierarchical and cust_attr["is_hierarchical"]:
+ hier_custom_attributes.append(cust_attr)
continue
- if split_hierarchical:
- if cust_attr["is_hierarchical"]:
- hier_custom_attributes.append(cust_attr)
- continue
-
custom_attributes.append(cust_attr)
if split_hierarchical:
@@ -373,256 +86,2185 @@ def get_avalon_attr(session, split_hierarchical=False):
return custom_attributes
-def changeability_check_childs(entity):
- if (entity.entity_type.lower() != 'task' and 'children' not in entity):
- return True
- childs = entity['children']
- for child in childs:
- if child.entity_type.lower() == 'task':
- available_statuses = config.get_presets().get(
- "ftrack", {}).get(
- "ftrack_config", {}).get(
- "sync_to_avalon", {}).get(
- "statuses_name_change", []
- )
- ent_status = child['status']['name'].lower()
- if ent_status not in available_statuses:
- return False
- # If not task go deeper
- elif changeability_check_childs(child) is False:
- return False
- # If everything is allright
- return True
+def from_dict_to_set(data):
+ result = {"$set": {}}
+ dict_queue = queue.Queue()
+ dict_queue.put((None, data))
+
+ while not dict_queue.empty():
+ _key, _data = dict_queue.get()
+ for key, value in _data.items():
+ new_key = key
+ if _key is not None:
+ new_key = "{}.{}".format(_key, key)
+
+ if not isinstance(value, dict):
+ result["$set"][new_key] = value
+ continue
+ dict_queue.put((new_key, value))
+ return result
-def get_data(entity, session, custom_attributes):
- database = get_avalon_database()
-
- entity_type = entity.entity_type
-
- if entity_type.lower() == 'project':
- ft_project = entity
- elif entity_type.lower() != 'project':
- ft_project = entity['project']
- av_project = get_avalon_project(ft_project)
-
- project_name = ft_project['full_name']
-
- data = {}
- data['ftrackId'] = entity['id']
- data['entityType'] = entity_type
-
- ent_types_query = "select id, name from ObjectType"
- ent_types = session.query(ent_types_query).all()
- ent_types_by_name = {
- ent_type["name"]: ent_type["id"] for ent_type in ent_types
- }
-
- for cust_attr in custom_attributes:
- # skip hierarchical attributes
- if cust_attr.get('is_hierarchical', False):
- continue
-
- key = cust_attr['key']
- if cust_attr['entity_type'].lower() in ['asset']:
- data[key] = entity['custom_attributes'][key]
-
- elif (
- cust_attr['entity_type'].lower() in ['show'] and
- entity_type.lower() == 'project'
- ):
- data[key] = entity['custom_attributes'][key]
-
- elif (
- cust_attr['entity_type'].lower() in ['task'] and
- entity_type.lower() != 'project'
- ):
- # Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build')
- entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
- # Get object id of entity type
- ent_obj_type_id = ent_types_by_name.get(entity_type_full)
-
- # Backup soluction when id is not found by prequeried objects
- if not ent_obj_type_id:
- query = 'ObjectType where name is "{}"'.format(
- entity_type_full
- )
- ent_obj_type_id = session.query(query).one()['id']
-
- if cust_attr['object_type_id'] == ent_obj_type_id:
- if key in entity['custom_attributes']:
- data[key] = entity['custom_attributes'][key]
-
- if entity_type in ['Project']:
- data['code'] = entity['name']
- return data
-
- # Get info for 'Data' in Avalon DB
- tasks = []
- for child in entity['children']:
- if child.entity_type in ['Task']:
- tasks.append(child['name'])
-
- # Get list of parents without project
- parents = []
- folderStruct = []
- for i in range(1, len(entity['link'])-1):
- parEnt = session.get(
- entity['link'][i]['type'],
- entity['link'][i]['id']
- )
- parName = parEnt['name']
- folderStruct.append(parName)
- parents.append(parEnt)
-
- parentId = None
-
- for parent in parents:
- parentId = database[project_name].find_one(
- {'type': 'asset', 'name': parName}
- )['_id']
- if parent['parent'].entity_type != 'project' and parentId is None:
- import_to_avalon(
- session, parent, ft_project, av_project, custom_attributes
- )
- parentId = database[project_name].find_one(
- {'type': 'asset', 'name': parName}
- )['_id']
-
- hierarchy = ""
- if len(folderStruct) > 0:
- hierarchy = os.path.sep.join(folderStruct)
-
- data['visualParent'] = parentId
- data['parents'] = folderStruct
- data['tasks'] = tasks
- data['hierarchy'] = hierarchy
-
- return data
-
-
-def get_avalon_project(ft_project):
- database = get_avalon_database()
- project_name = ft_project['full_name']
- ca_mongoid = get_ca_mongoid()
- if ca_mongoid not in ft_project['custom_attributes']:
- return None
-
- # try to find by Id
- project_id = ft_project['custom_attributes'][ca_mongoid]
- try:
- avalon_project = database[project_name].find_one({
- '_id': ObjectId(project_id)
- })
- except Exception:
- avalon_project = None
-
- if avalon_project is None:
- avalon_project = database[project_name].find_one({
- 'type': 'project'
- })
-
- return avalon_project
-
-
-def get_avalon_project_template():
+def get_avalon_project_template(project_name):
"""Get avalon template
-
Returns:
dictionary with templates
"""
- templates = Anatomy().templates
+ templates = Anatomy(project_name).templates
return {
- 'workfile': templates["avalon"]["workfile"],
- 'work': templates["avalon"]["work"],
- 'publish': templates["avalon"]["publish"]
+ "workfile": templates["avalon"]["workfile"],
+ "work": templates["avalon"]["work"],
+ "publish": templates["avalon"]["publish"]
}
-def get_project_config(entity):
- proj_config = {}
- proj_config['schema'] = 'avalon-core:config-1.0'
- proj_config['tasks'] = get_tasks(entity)
- proj_config['apps'] = get_project_apps(entity)
- proj_config['template'] = get_avalon_project_template()
-
- return proj_config
-
-
-def get_tasks(project):
- task_types = project['project_schema']['_task_type_schema']['types']
- return [{'name': task_type['name']} for task_type in task_types]
-
-
-def get_project_apps(entity):
- """ Get apps from project
- Requirements:
- 'Entity' MUST be object of ftrack entity with entity_type 'Project'
- Checking if app from ftrack is available in Templates/bin/{app_name}.toml
-
- Returns:
- Array with dictionaries with app Name and Label
- """
+def get_project_apps(in_app_list):
apps = []
- for app in entity['custom_attributes']['applications']:
+ # TODO report
+ missing_toml_msg = "Missing config file for application"
+ error_msg = (
+ "Unexpected error happend during preparation of application"
+ )
+ warnings = collections.defaultdict(list)
+ for app in in_app_list:
try:
toml_path = avalon.lib.which_app(app)
if not toml_path:
- log.warning((
- 'Missing config file for application "{}"'
- ).format(app))
+ log.warning(missing_toml_msg + ' "{}"'.format(app))
+ warnings[missing_toml_msg].append(app)
continue
apps.append({
- 'name': app,
- 'label': toml.load(toml_path)['label']
+ "name": app,
+ "label": toml.load(toml_path)["label"]
+ })
+ except Exception:
+ warnings[error_msg].append(app)
+ log.warning((
+ "Error has happened during preparing application \"{}\""
+ ).format(app), exc_info=True)
+ return apps, warnings
+
+
+def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):
+ entity_ids = []
+ if entity.entity_type.lower() == "project":
+ entity_ids.append(entity["id"])
+ else:
+ typed_context = session.query((
+ "select ancestors.id, project from TypedContext where id is \"{}\""
+ ).format(entity["id"])).one()
+ entity_ids.append(typed_context["id"])
+ entity_ids.extend(
+ [ent["id"] for ent in reversed(typed_context["ancestors"])]
+ )
+ entity_ids.append(typed_context["project"]["id"])
+
+ missing_defaults = []
+ for attr_name in attr_names:
+ if attr_name not in attr_defaults:
+ missing_defaults.append(attr_name)
+
+ join_ent_ids = ", ".join(
+ ["\"{}\"".format(entity_id) for entity_id in entity_ids]
+ )
+ join_attribute_names = ", ".join(
+ ["\"{}\"".format(key) for key in attr_names]
+ )
+ queries = []
+ queries.append({
+ "action": "query",
+ "expression": (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ ).format(join_ent_ids, join_attribute_names)
+ })
+
+ if not missing_defaults:
+ if hasattr(session, "call"):
+ [values] = session.call(queries)
+ else:
+ [values] = session._call(queries)
+ else:
+ join_missing_names = ", ".join(
+ ["\"{}\"".format(key) for key in missing_defaults]
+ )
+ queries.append({
+ "action": "query",
+ "expression": (
+ "select default from CustomAttributeConfiguration "
+ "where key in ({})"
+ ).format(join_missing_names)
+ })
+
+ [values, default_values] = session.call(queries)
+ for default_value in default_values:
+ key = default_value["data"][0]["key"]
+ attr_defaults[key] = default_value["data"][0]["default"]
+
+ hier_values = {}
+ for key, val in attr_defaults.items():
+ hier_values[key] = val
+
+ if not values["data"]:
+ return hier_values
+
+ _hier_values = collections.defaultdict(list)
+ for value in values["data"]:
+ key = value["configuration"]["key"]
+ _hier_values[key].append(value)
+
+ for key, values in _hier_values.items():
+ value = sorted(
+ values, key=lambda value: entity_ids.index(value["entity_id"])
+ )[0]
+ hier_values[key] = value["value"]
+
+ return hier_values
+
+
+class SyncEntitiesFactory:
+ dbcon = DbConnector()
+
+ project_query = (
+ "select full_name, name, custom_attributes"
+ ", project_schema._task_type_schema.types.name"
+ " from Project where full_name is \"{}\""
+ )
+ entities_query = (
+ "select id, name, parent_id, link"
+ " from TypedContext where project_id is \"{}\""
+ )
+ ignore_custom_attr_key = "avalon_ignore_sync"
+
+ report_splitter = {"type": "label", "value": "---"}
+
+ def __init__(self, log_obj, session):
+ self.log = log_obj
+ self._server_url = session.server_url
+ self._api_key = session.api_key
+ self._api_user = session.api_user
+
+ def launch_setup(self, project_full_name):
+ try:
+ self.session.close()
+ except Exception:
+ pass
+
+ self.session = ftrack_api.Session(
+ server_url=self._server_url,
+ api_key=self._api_key,
+ api_user=self._api_user,
+ auto_connect_event_hub=True
+ )
+
+ self.duplicates = {}
+ self.failed_regex = {}
+ self.tasks_failed_regex = collections.defaultdict(list)
+ self.report_items = {
+ "info": collections.defaultdict(list),
+ "warning": collections.defaultdict(list),
+ "error": collections.defaultdict(list)
+ }
+
+ self.create_list = []
+ self.updates = collections.defaultdict(dict)
+
+ self.avalon_project = None
+ self.avalon_entities = None
+
+ self._avalon_ents_by_id = None
+ self._avalon_ents_by_ftrack_id = None
+ self._avalon_ents_by_name = None
+ self._avalon_ents_by_parent_id = None
+
+ self._avalon_archived_ents = None
+ self._avalon_archived_by_id = None
+ self._avalon_archived_by_parent_id = None
+ self._avalon_archived_by_name = None
+
+ self._subsets_by_parent_id = None
+ self._changeability_by_mongo_id = None
+
+ self.all_filtered_entities = {}
+ self.filtered_ids = []
+ self.not_selected_ids = []
+
+ self._ent_paths_by_ftrack_id = {}
+
+ self.ftrack_avalon_mapper = None
+ self.avalon_ftrack_mapper = None
+ self.create_ftrack_ids = None
+ self.update_ftrack_ids = None
+ self.deleted_entities = None
+
+ # Get Ftrack project
+ ft_project = self.session.query(
+ self.project_query.format(project_full_name)
+ ).one()
+ ft_project_id = ft_project["id"]
+
+ # Skip if project is ignored
+ if ft_project["custom_attributes"].get(
+ self.ignore_custom_attr_key
+ ) is True:
+ msg = (
+ "Project \"{}\" has set `Ignore Sync` custom attribute to True"
+ ).format(project_full_name)
+ self.log.warning(msg)
+ return {"success": False, "message": msg}
+
+ # Check if `avalon_mongo_id` custom attribute exist or is accessible
+ if CustAttrIdKey not in ft_project["custom_attributes"]:
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# Can't access Custom attribute <{}>".format(
+ CustAttrIdKey
+ )
+ })
+            items.append({
+                "type": "label",
+                "value": (
+                    "- Check if user \"{}\" has permissions"
+                    " to access the Custom attribute"
+                ).format(self._api_user)
+            })
+            items.append({
+                "type": "label",
+                "value": "- Check if the Custom attribute exist"
+            })
+ return {
+ "items": items,
+ "title": "Synchronization failed",
+ "success": False,
+ "message": "Synchronization failed"
+ }
+
+ # Find all entities in project
+ all_project_entities = self.session.query(
+ self.entities_query.format(ft_project_id)
+ ).all()
+
+ # Store entities by `id` and `parent_id`
+ entities_dict = collections.defaultdict(lambda: {
+ "children": list(),
+ "parent_id": None,
+ "entity": None,
+ "entity_type": None,
+ "name": None,
+ "custom_attributes": {},
+ "hier_attrs": {},
+ "avalon_attrs": {},
+ "tasks": []
+ })
+
+ for entity in all_project_entities:
+ parent_id = entity["parent_id"]
+ entity_type = entity.entity_type
+ entity_type_low = entity_type.lower()
+ if entity_type_low == "task":
+ entities_dict[parent_id]["tasks"].append(entity["name"])
+ continue
+
+ entity_id = entity["id"]
+ entities_dict[entity_id].update({
+ "entity": entity,
+ "parent_id": parent_id,
+ "entity_type": entity_type_low,
+ "entity_type_orig": entity_type,
+ "name": entity["name"]
+ })
+ entities_dict[parent_id]["children"].append(entity_id)
+
+ entities_dict[ft_project_id]["entity"] = ft_project
+ entities_dict[ft_project_id]["entity_type"] = (
+ ft_project.entity_type.lower()
+ )
+ entities_dict[ft_project_id]["entity_type_orig"] = (
+ ft_project.entity_type
+ )
+ entities_dict[ft_project_id]["name"] = ft_project["full_name"]
+
+ self.ft_project_id = ft_project_id
+ self.entities_dict = entities_dict
+
+ @property
+ def avalon_ents_by_id(self):
+ if self._avalon_ents_by_id is None:
+ self._avalon_ents_by_id = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_id[str(entity["_id"])] = entity
+
+ return self._avalon_ents_by_id
+
+ @property
+ def avalon_ents_by_ftrack_id(self):
+ if self._avalon_ents_by_ftrack_id is None:
+ self._avalon_ents_by_ftrack_id = {}
+ for entity in self.avalon_entities:
+ key = entity.get("data", {}).get("ftrackId")
+ if not key:
+ continue
+ self._avalon_ents_by_ftrack_id[key] = str(entity["_id"])
+
+ return self._avalon_ents_by_ftrack_id
+
+ @property
+ def avalon_ents_by_name(self):
+ if self._avalon_ents_by_name is None:
+ self._avalon_ents_by_name = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_name[entity["name"]] = str(entity["_id"])
+
+ return self._avalon_ents_by_name
+
+ @property
+ def avalon_ents_by_parent_id(self):
+ if self._avalon_ents_by_parent_id is None:
+ self._avalon_ents_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_entities:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_ents_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_ents_by_parent_id
+
+ @property
+ def avalon_archived_ents(self):
+ if self._avalon_archived_ents is None:
+ self._avalon_archived_ents = [
+ ent for ent in self.dbcon.find({"type": "archived_asset"})
+ ]
+ return self._avalon_archived_ents
+
+ @property
+ def avalon_archived_by_name(self):
+ if self._avalon_archived_by_name is None:
+ self._avalon_archived_by_name = collections.defaultdict(list)
+ for ent in self.avalon_archived_ents:
+ self._avalon_archived_by_name[ent["name"]].append(ent)
+ return self._avalon_archived_by_name
+
+ @property
+ def avalon_archived_by_id(self):
+ if self._avalon_archived_by_id is None:
+ self._avalon_archived_by_id = {
+ str(ent["_id"]): ent for ent in self.avalon_archived_ents
+ }
+ return self._avalon_archived_by_id
+
+ @property
+ def avalon_archived_by_parent_id(self):
+ if self._avalon_archived_by_parent_id is None:
+ self._avalon_archived_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_archived_ents:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_archived_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_archived_by_parent_id
+
+ @property
+ def subsets_by_parent_id(self):
+ if self._subsets_by_parent_id is None:
+ self._subsets_by_parent_id = collections.defaultdict(list)
+ for subset in self.dbcon.find({"type": "subset"}):
+ self._subsets_by_parent_id[str(subset["parent"])].append(
+ subset
+ )
+
+ return self._subsets_by_parent_id
+
+ @property
+ def changeability_by_mongo_id(self):
+ if self._changeability_by_mongo_id is None:
+ self._changeability_by_mongo_id = collections.defaultdict(
+ lambda: True
+ )
+ self._changeability_by_mongo_id[self.avalon_project_id] = False
+ self._bubble_changeability(list(self.subsets_by_parent_id.keys()))
+ return self._changeability_by_mongo_id
+
+ @property
+ def all_ftrack_names(self):
+ return [
+ ent_dict["name"] for ent_dict in self.entities_dict.values() if (
+ ent_dict.get("name")
+ )
+ ]
+
+ def duplicity_regex_check(self):
+ self.log.debug("* Checking duplicities and invalid symbols")
+ # Duplicity and regex check
+ entity_ids_by_name = {}
+ duplicates = []
+ failed_regex = []
+ task_names = {}
+ _schema_patterns = {}
+ for ftrack_id, entity_dict in self.entities_dict.items():
+ regex_check = True
+ name = entity_dict["name"]
+ entity_type = entity_dict["entity_type"]
+ # Tasks must be checked too
+ for task_name in entity_dict["tasks"]:
+ passed = task_names.get(task_name)
+ if passed is None:
+ passed = check_regex(
+ task_name, "task", schema_patterns=_schema_patterns
+ )
+ task_names[task_name] = passed
+
+ if not passed:
+ self.tasks_failed_regex[task_name].append(ftrack_id)
+
+ if name in entity_ids_by_name:
+ duplicates.append(name)
+ else:
+ entity_ids_by_name[name] = []
+ regex_check = check_regex(
+ name, entity_type, schema_patterns=_schema_patterns
+ )
+
+ entity_ids_by_name[name].append(ftrack_id)
+ if not regex_check:
+ failed_regex.append(name)
+
+ for name in failed_regex:
+ self.failed_regex[name] = entity_ids_by_name[name]
+
+ for name in duplicates:
+ self.duplicates[name] = entity_ids_by_name[name]
+
+ self.filter_by_duplicate_regex()
+
+ def filter_by_duplicate_regex(self):
+ filter_queue = queue.Queue()
+ failed_regex_msg = "{} - Entity has invalid symbols in the name"
+ duplicate_msg = "There are multiple entities with the name: \"{}\":"
+
+ for ids in self.failed_regex.values():
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(ent_path))
+ filter_queue.put(id)
+
+ for name, ids in self.duplicates.items():
+ self.log.warning(duplicate_msg.format(name))
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(ent_path)
+ filter_queue.put(id)
+
+ filtered_ids = []
+ while not filter_queue.empty():
+ ftrack_id = filter_queue.get()
+ if ftrack_id in filtered_ids:
+ continue
+
+ entity_dict = self.entities_dict.pop(ftrack_id, {})
+ if not entity_dict:
+ continue
+
+ self.all_filtered_entities[ftrack_id] = entity_dict
+ parent_id = entity_dict.get("parent_id")
+ if parent_id and parent_id in self.entities_dict:
+ if ftrack_id in self.entities_dict[parent_id]["children"]:
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ filtered_ids.append(ftrack_id)
+ for child_id in entity_dict.get("children", []):
+ filter_queue.put(child_id)
+
+ for name, ids in self.tasks_failed_regex.items():
+ for id in ids:
+ if id not in self.entities_dict:
+ continue
+ self.entities_dict[id]["tasks"].remove(name)
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(
+ "/".join([ent_path, name])
+ ))
+
+ def filter_by_ignore_sync(self):
+ # skip filtering if `ignore_sync` attribute do not exist
+ if self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key, "_notset_"
+ ) == "_notset_":
+ return
+
+ self.filter_queue = queue.Queue()
+ self.filter_queue.put((self.ft_project_id, False))
+ while not self.filter_queue.empty():
+ parent_id, remove = self.filter_queue.get()
+ if remove:
+ parent_dict = self.entities_dict.pop(parent_id, {})
+ self.all_filtered_entities[parent_id] = parent_dict
+ self.filtered_ids.append(parent_id)
+ else:
+ parent_dict = self.entities_dict.get(parent_id, {})
+
+ for child_id in parent_dict.get("children", []):
+ # keep original `remove` value for all childs
+ _remove = (remove is True)
+ if not _remove:
+ if self.entities_dict[child_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key
+ ):
+ self.entities_dict[parent_id]["children"].remove(
+ child_id
+ )
+ _remove = True
+ self.filter_queue.put((child_id, _remove))
+
    def filter_by_selection(self, event):
        """Reduce `entities_dict` to entities related to the event selection.

        Keeps the project, every selected entity, all of their parents up
        to the project and all of their children. Everything else is
        recorded in `not_selected_ids`, popped from `entities_dict` and
        detached from its parent's "children" list. Returns early (no
        filtering) when the project itself is part of the selection.
        """
        # BUGGY!!!! cause that entities are in deleted list
        # TODO may be working when filtering happen after preparations
        # - But this part probably does not have any functional reason
        # - Time of synchronization probably won't be changed much
        selected_ids = []
        for entity in event["data"]["selection"]:
            # Skip if project is in selection
            if entity["entityType"] == "show":
                return
            selected_ids.append(entity["entityId"])

        sync_ids = [self.ft_project_id]
        parents_queue = queue.Queue()
        children_queue = queue.Queue()
        for id in selected_ids:
            # skip if already filtered with ignore sync custom attribute
            if id in self.filtered_ids:
                continue

            parents_queue.put(id)
            children_queue.put(id)

        # Walk upwards from each selected entity and collect parent ids
        while not parents_queue.empty():
            id = parents_queue.get()
            while True:
                # Stops when parent is in sync_ids
                if id in self.filtered_ids or id in sync_ids or id is None:
                    break
                sync_ids.append(id)
                id = self.entities_dict[id]["parent_id"]

        # Walk downwards from each selected entity and collect children ids
        while not children_queue.empty():
            parent_id = children_queue.get()
            for child_id in self.entities_dict[parent_id]["children"]:
                if child_id in sync_ids or child_id in self.filtered_ids:
                    continue
                sync_ids.append(child_id)
                children_queue.put(child_id)

        # separate not selected and to process entities
        for key, value in self.entities_dict.items():
            if key not in sync_ids:
                self.not_selected_ids.append(key)

        for id in self.not_selected_ids:
            # pop from entities
            value = self.entities_dict.pop(id)
            # remove entity from parent's children
            parent_id = value["parent_id"]
            if parent_id not in sync_ids:
                continue

            self.entities_dict[parent_id]["children"].remove(id)
+
    def set_cutom_attributes(self):
        """Load non-hierarchical custom attribute values for all entities.

        For every entity in `entities_dict` this prepares:
          - "custom_attributes" / "custom_attributes_id": default values
            (later overwritten by actually set values) and configuration
            ids of regular attributes, resolved per entity/object type.
          - "avalon_attrs" / "avalon_attrs_id": the same for attributes
            whose key starts with "avalon_".
        Set values are fetched with a single raw session call, then
        hierarchical attributes are delegated to
        `set_hierarchical_attribute`.

        NOTE: the (misspelled) method name is kept — it is the public name.
        """
        self.log.debug("* Preparing custom attributes")
        # Get custom attributes and values
        custom_attrs, hier_attrs = get_avalon_attr(self.session)
        ent_types = self.session.query("select id, name from ObjectType").all()
        ent_types_by_name = {
            ent_type["name"]: ent_type["id"] for ent_type in ent_types
        }

        attrs = set()
        # store default values per entity type
        attrs_per_entity_type = collections.defaultdict(dict)
        avalon_attrs = collections.defaultdict(dict)
        # store also custom attribute configuration id for future use (create)
        attrs_per_entity_type_ca_id = collections.defaultdict(dict)
        avalon_attrs_ca_id = collections.defaultdict(dict)

        for cust_attr in custom_attrs:
            key = cust_attr["key"]
            attrs.add(cust_attr["id"])
            ca_ent_type = cust_attr["entity_type"]
            # "avalon_" prefixed attributes are tracked separately
            if key.startswith("avalon_"):
                if ca_ent_type == "show":
                    avalon_attrs[ca_ent_type][key] = cust_attr["default"]
                    avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
                else:
                    obj_id = cust_attr["object_type_id"]
                    avalon_attrs[obj_id][key] = cust_attr["default"]
                    avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
                continue

            if ca_ent_type == "show":
                attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
                attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
            else:
                obj_id = cust_attr["object_type_id"]
                attrs_per_entity_type[obj_id][key] = cust_attr["default"]
                attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]

        obj_id_ent_type_map = {}
        sync_ids = []
        for entity_id, entity_dict in self.entities_dict.items():
            sync_ids.append(entity_id)
            entity_type = entity_dict["entity_type"]
            entity_type_orig = entity_dict["entity_type_orig"]

            # project attributes are keyed "show", others by object type id
            if entity_type == "project":
                attr_key = "show"
            else:
                map_key = obj_id_ent_type_map.get(entity_type_orig)
                if not map_key:
                    # Put space between capitals
                    # (e.g. 'AssetBuild' -> 'Asset Build')
                    map_key = re.sub(
                        r"(\w)([A-Z])", r"\1 \2", entity_type_orig
                    )
                    obj_id_ent_type_map[entity_type_orig] = map_key

                # Get object id of entity type
                attr_key = ent_types_by_name.get(map_key)

                # Backup solution when id is not found by prequeried objects
                if not attr_key:
                    query = "ObjectType where name is \"{}\"".format(map_key)
                    attr_key = self.session.query(query).one()["id"]
                    ent_types_by_name[map_key] = attr_key

            prepared_attrs = attrs_per_entity_type.get(attr_key)
            prepared_avalon_attr = avalon_attrs.get(attr_key)
            prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key)
            prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key)
            if prepared_attrs:
                self.entities_dict[entity_id]["custom_attributes"] = (
                    prepared_attrs.copy()
                )
            if prepared_attrs_ca_id:
                self.entities_dict[entity_id]["custom_attributes_id"] = (
                    prepared_attrs_ca_id.copy()
                )
            if prepared_avalon_attr:
                self.entities_dict[entity_id]["avalon_attrs"] = (
                    prepared_avalon_attr.copy()
                )
            if prepared_avalon_attr_ca_id:
                self.entities_dict[entity_id]["avalon_attrs_id"] = (
                    prepared_avalon_attr_ca_id.copy()
                )

        # TODO query custom attributes by entity_id
        entity_ids_joined = ", ".join([
            "\"{}\"".format(id) for id in sync_ids
        ])
        attributes_joined = ", ".join([
            "\"{}\"".format(name) for name in attrs
        ])

        cust_attr_query = (
            "select value, entity_id from ContextCustomAttributeValue "
            "where entity_id in ({}) and configuration_id in ({})"
        )
        call_expr = [{
            "action": "query",
            "expression": cust_attr_query.format(
                entity_ids_joined, attributes_joined
            )
        }]
        # `call` is public API in newer ftrack_api, `_call` in older versions
        if hasattr(self.session, "call"):
            [values] = self.session.call(call_expr)
        else:
            [values] = self.session._call(call_expr)

        # Overwrite defaults with the values actually set in ftrack
        for value in values["data"]:
            entity_id = value["entity_id"]
            key = value["configuration"]["key"]
            store_key = "custom_attributes"
            if key.startswith("avalon_"):
                store_key = "avalon_attrs"
            self.entities_dict[entity_id][store_key][key] = value["value"]

        # process hierarchical attributes
        self.set_hierarchical_attribute(hier_attrs, sync_ids)
+
    def set_hierarchical_attribute(self, hier_attrs, sync_ids):
        """Query hierarchical custom attribute values and inherit them down.

        Stores attribute defaults on the project entity, queries explicitly
        set values for `sync_ids` in one raw call, then pushes every
        not-None value from parent to children breadth-first. Regular keys
        go to "hier_attrs", keys prefixed "avalon_" to "avalon_attrs".

        Args:
            hier_attrs (list): Hierarchical custom attribute configurations.
            sync_ids (list): Ftrack entity ids to query values for.
        """
        # collect all hierarchical attribute keys
        # and prepare default values to project
        attribute_names = []
        attribute_ids = []
        for attr in hier_attrs:
            key = attr["key"]
            attribute_ids.append(attr["id"])
            attribute_names.append(key)

            store_key = "hier_attrs"
            if key.startswith("avalon_"):
                store_key = "avalon_attrs"

            self.entities_dict[self.ft_project_id][store_key][key] = (
                attr["default"]
            )

        # Prepare dict with all hier keys and None values
        prepare_dict = {}
        prepare_dict_avalon = {}
        for attr in attribute_names:
            if attr.startswith("avalon_"):
                prepare_dict_avalon[attr] = None
            else:
                prepare_dict[attr] = None

        for id, entity_dict in self.entities_dict.items():
            # Skip project because has stored defaults at the moment
            if entity_dict["entity_type"] == "project":
                continue
            entity_dict["hier_attrs"] = prepare_dict.copy()
            for key, val in prepare_dict_avalon.items():
                entity_dict["avalon_attrs"][key] = val

        # Prepare values to query
        entity_ids_joined = ", ".join([
            "\"{}\"".format(id) for id in sync_ids
        ])
        attributes_joined = ", ".join([
            "\"{}\"".format(name) for name in attribute_ids
        ])
        call_expr = [{
            "action": "query",
            "expression": (
                "select value, entity_id from CustomAttributeValue "
                "where entity_id in ({}) and configuration_id in ({})"
            ).format(entity_ids_joined, attributes_joined)
        }]
        # `call` is public API in newer ftrack_api, `_call` in older versions
        if hasattr(self.session, "call"):
            [values] = self.session.call(call_expr)
        else:
            [values] = self.session._call(call_expr)

        avalon_hier = []
        for value in values["data"]:
            if value["value"] is None:
                continue
            entity_id = value["entity_id"]
            key = value["configuration"]["key"]
            store_key = "hier_attrs"
            if key.startswith("avalon_"):
                store_key = "avalon_attrs"
                avalon_hier.append(key)
            self.entities_dict[entity_id][store_key][key] = value["value"]

        # Get dictionary with not None hierarchical values to pull to childs
        top_id = self.ft_project_id
        project_values = {}
        for key, value in self.entities_dict[top_id]["hier_attrs"].items():
            if value is not None:
                project_values[key] = value

        for key in avalon_hier:
            value = self.entities_dict[top_id]["avalon_attrs"][key]
            if value is not None:
                project_values[key] = value

        # Breadth-first push of inherited values; a child's own not-None
        # value overrides the inherited one for its subtree
        hier_down_queue = queue.Queue()
        hier_down_queue.put((project_values, top_id))

        while not hier_down_queue.empty():
            hier_values, parent_id = hier_down_queue.get()
            for child_id in self.entities_dict[parent_id]["children"]:
                _hier_values = hier_values.copy()
                for name in attribute_names:
                    store_key = "hier_attrs"
                    if name.startswith("avalon_"):
                        store_key = "avalon_attrs"
                    value = self.entities_dict[child_id][store_key][name]
                    if value is not None:
                        _hier_values[name] = value

                self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
                hier_down_queue.put((_hier_values, child_id))
+
+ def remove_from_archived(self, mongo_id):
+ entity = self.avalon_archived_by_id.pop(mongo_id, None)
+ if not entity:
+ return
+
+ if self._avalon_archived_ents is not None:
+ if entity in self._avalon_archived_ents:
+ self._avalon_archived_ents.remove(entity)
+
+ if self._avalon_archived_by_name is not None:
+ name = entity["name"]
+ if name in self._avalon_archived_by_name:
+ name_ents = self._avalon_archived_by_name[name]
+ if entity in name_ents:
+ if len(name_ents) == 1:
+ self._avalon_archived_by_name.pop(name)
+ else:
+ self._avalon_archived_by_name[name].remove(entity)
+
+ # TODO use custom None instead of __NOTSET__
+ if self._avalon_archived_by_parent_id is not None:
+ parent_id = entity.get("data", {}).get(
+ "visualParent", "__NOTSET__"
+ )
+ if parent_id is not None:
+ parent_id = str(parent_id)
+
+ if parent_id in self._avalon_archived_by_parent_id:
+ parent_list = self._avalon_archived_by_parent_id[parent_id]
+ if entity not in parent_list:
+ self._avalon_archived_by_parent_id[parent_id].remove(
+ entity
+ )
+
+ def prepare_ftrack_ent_data(self):
+ not_set_ids = []
+ for id, entity_dict in self.entities_dict.items():
+ entity = entity_dict["entity"]
+ if entity is None:
+ not_set_ids.append(id)
+ continue
+
+ self.entities_dict[id]["final_entity"] = {}
+ self.entities_dict[id]["final_entity"]["name"] = (
+ entity_dict["name"]
+ )
+ data = {}
+ data["ftrackId"] = entity["id"]
+ data["entityType"] = entity_dict["entity_type_orig"]
+
+ for key, val in entity_dict.get("custom_attributes", []).items():
+ data[key] = val
+
+ for key, val in entity_dict.get("hier_attrs", []).items():
+ data[key] = val
+
+ if id == self.ft_project_id:
+ data["code"] = entity["name"]
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "project"
+
+ proj_schema = entity["project_schema"]
+ task_types = proj_schema["_task_type_schema"]["types"]
+ proj_apps, warnings = get_project_apps(
+ (data.get("applications") or [])
+ )
+ for msg, items in warnings.items():
+ if not msg or not items:
+ continue
+ self.report_items["warning"][msg] = items
+
+ self.entities_dict[id]["final_entity"]["config"] = {
+ "tasks": [{"name": tt["name"]} for tt in task_types],
+ "apps": proj_apps
+ }
+ continue
+
+ ent_path_items = [ent["name"] for ent in entity["link"]]
+ parents = ent_path_items[1:len(ent_path_items)-1:]
+ hierarchy = ""
+ if len(parents) > 0:
+ hierarchy = os.path.sep.join(parents)
+
+ data["parents"] = parents
+ data["hierarchy"] = hierarchy
+ data["tasks"] = self.entities_dict[id].pop("tasks", [])
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "asset"
+
+ if not_set_ids:
+ self.log.debug((
+ "- Debug information: Filtering bug, there are empty dicts"
+ "in entities dict (functionality should not be affected) <{}>"
+ ).format("| ".join(not_set_ids)))
+ for id in not_set_ids:
+ self.entities_dict.pop(id)
+
+ def get_ent_path(self, ftrack_id):
+ ent_path = self._ent_paths_by_ftrack_id.get(ftrack_id)
+ if not ent_path:
+ entity = self.entities_dict[ftrack_id]["entity"]
+ ent_path = "/".join(
+ [ent["name"] for ent in entity["link"]]
+ )
+ self._ent_paths_by_ftrack_id[ftrack_id] = ent_path
+
+ return ent_path
+
    def prepare_avalon_entities(self, ft_project_name):
        """Match ftrack entities to avalon documents and split them into
        Create / Update / Deleted groups.

        Fills `ftrack_avalon_mapper`, `avalon_ftrack_mapper`,
        `create_ftrack_ids`, `update_ftrack_ids` and `deleted_entities`.
        Matching order per entity: stored avalon mongo id custom attribute
        (with disambiguation when several ftrack entities share one id),
        then ftrackId stored in mongo, then entity name.

        Args:
            ft_project_name (str): Name of the project (used as the avalon
                database/session project name).
        """
        self.log.debug((
            "* Preparing avalon entities "
            "(separate to Create, Update and Deleted groups)"
        ))
        # Avalon entities
        self.dbcon.install()
        self.dbcon.Session["AVALON_PROJECT"] = ft_project_name
        avalon_project = self.dbcon.find_one({"type": "project"})
        avalon_entities = self.dbcon.find({"type": "asset"})
        self.avalon_project = avalon_project
        self.avalon_entities = avalon_entities

        ftrack_avalon_mapper = {}
        avalon_ftrack_mapper = {}
        create_ftrack_ids = []
        update_ftrack_ids = []

        # Collect mongo ids stored in ftrack and detect duplicates
        same_mongo_id = []
        all_mongo_ids = {}
        for ftrack_id, entity_dict in self.entities_dict.items():
            mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey)
            if not mongo_id:
                continue
            if mongo_id in all_mongo_ids:
                same_mongo_id.append(mongo_id)
            else:
                all_mongo_ids[mongo_id] = []
            all_mongo_ids[mongo_id].append(ftrack_id)

        if avalon_project:
            mongo_id = str(avalon_project["_id"])
            ftrack_avalon_mapper[self.ft_project_id] = mongo_id
            avalon_ftrack_mapper[mongo_id] = self.ft_project_id
            update_ftrack_ids.append(self.ft_project_id)
        else:
            create_ftrack_ids.append(self.ft_project_id)

        # make it go hierarchically
        prepare_queue = queue.Queue()

        for child_id in self.entities_dict[self.ft_project_id]["children"]:
            prepare_queue.put(child_id)

        while not prepare_queue.empty():
            ftrack_id = prepare_queue.get()
            for child_id in self.entities_dict[ftrack_id]["children"]:
                prepare_queue.put(child_id)

            entity_dict = self.entities_dict[ftrack_id]
            ent_path = self.get_ent_path(ftrack_id)

            # 1) try match by the mongo id stored in the custom attribute
            mongo_id = entity_dict["avalon_attrs"].get(CustAttrIdKey)
            av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
            if av_ent_by_mongo_id:
                av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
                    "ftrackId"
                )
                is_right = False
                else_match_better = False
                if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id:
                    is_right = True

                elif mongo_id not in same_mongo_id:
                    is_right = True

                else:
                    # several ftrack entities claim this mongo id - check
                    # whether another claimant matches the stored ftrackId
                    ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id]
                    for _ftrack_id in ftrack_ids_with_same_mongo:
                        if _ftrack_id == av_ent_ftrack_id:
                            continue

                        _entity_dict = self.entities_dict[_ftrack_id]
                        _mongo_id = _entity_dict["avalon_attrs"][CustAttrIdKey]
                        # NOTE(review): `.get` may return None here and the
                        # chained `.get("data", {})` would then raise
                        # AttributeError - confirm ids are always known
                        _av_ent_by_mongo_id = self.avalon_ents_by_id.get(
                            _mongo_id
                        )
                        _av_ent_ftrack_id = _av_ent_by_mongo_id.get(
                            "data", {}
                        ).get("ftrackId")
                        if _av_ent_ftrack_id == ftrack_id:
                            else_match_better = True
                            break

                if not is_right and not else_match_better:
                    # fall back to comparing parents, then name
                    entity = entity_dict["entity"]
                    ent_path_items = [ent["name"] for ent in entity["link"]]
                    parents = ent_path_items[1:len(ent_path_items)-1:]
                    av_parents = av_ent_by_mongo_id["data"]["parents"]
                    if av_parents == parents:
                        is_right = True
                    else:
                        name = entity_dict["name"]
                        av_name = av_ent_by_mongo_id["name"]
                        if name == av_name:
                            is_right = True

                if is_right:
                    self.log.debug(
                        "Existing (by MongoID) <{}>".format(ent_path)
                    )
                    ftrack_avalon_mapper[ftrack_id] = mongo_id
                    avalon_ftrack_mapper[mongo_id] = ftrack_id
                    update_ftrack_ids.append(ftrack_id)
                    continue

            # 2) try match by ftrackId stored in mongo, 3) by name
            mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
            if not mongo_id:
                mongo_id = self.avalon_ents_by_name.get(entity_dict["name"])
                if mongo_id:
                    self.log.debug(
                        "Existing (by matching name) <{}>".format(ent_path)
                    )
            else:
                self.log.debug(
                    "Existing (by FtrackID in mongo) <{}>".format(ent_path)
                )

            if mongo_id:
                ftrack_avalon_mapper[ftrack_id] = mongo_id
                avalon_ftrack_mapper[mongo_id] = ftrack_id
                update_ftrack_ids.append(ftrack_id)
                continue

            # no match - entity must be created
            self.log.debug("New <{}>".format(ent_path))
            create_ftrack_ids.append(ftrack_id)

        # Avalon entities without a matched ftrack entity will be archived
        deleted_entities = []
        for mongo_id in self.avalon_ents_by_id:
            if mongo_id in avalon_ftrack_mapper:
                continue
            deleted_entities.append(mongo_id)

            av_ent = self.avalon_ents_by_id[mongo_id]
            av_ent_path_items = [p for p in av_ent["data"]["parents"]]
            av_ent_path_items.append(av_ent["name"])
            self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))

        self.ftrack_avalon_mapper = ftrack_avalon_mapper
        self.avalon_ftrack_mapper = avalon_ftrack_mapper
        self.create_ftrack_ids = create_ftrack_ids
        self.update_ftrack_ids = update_ftrack_ids
        self.deleted_entities = deleted_entities

        self.log.debug((
            "Ftrack -> Avalon comparison: New <{}> "
            "| Existing <{}> | Deleted <{}>"
        ).format(
            len(create_ftrack_ids),
            len(update_ftrack_ids),
            len(deleted_entities)
        ))
+
+ def filter_with_children(self, ftrack_id):
+ if ftrack_id not in self.entities_dict:
+ return
+ ent_dict = self.entities_dict[ftrack_id]
+ parent_id = ent_dict["parent_id"]
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ children_queue = queue.Queue()
+ children_queue.put(ftrack_id)
+ while not children_queue.empty():
+ _ftrack_id = children_queue.get()
+ entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
+ for child_id in entity_dict["children"]:
+ children_queue.put(child_id)
+
+ def prepare_changes(self):
+ self.log.debug("* Preparing changes for avalon/ftrack")
+ hierarchy_changing_ids = []
+ ignore_keys = collections.defaultdict(list)
+
+ update_queue = queue.Queue()
+ for ftrack_id in self.update_ftrack_ids:
+ update_queue.put(ftrack_id)
+
+ while not update_queue.empty():
+ ftrack_id = update_queue.get()
+ if ftrack_id == self.ft_project_id:
+ changes = self.prepare_project_changes()
+ if changes:
+ self.updates[self.avalon_project_id] = changes
+ continue
+
+ ftrack_ent_dict = self.entities_dict[ftrack_id]
+
+ # *** check parents
+ parent_check = False
+
+ ftrack_parent_id = ftrack_ent_dict["parent_id"]
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+ avalon_parent_id = avalon_entity["data"]["visualParent"]
+ if avalon_parent_id is not None:
+ avalon_parent_id = str(avalon_parent_id)
+
+ ftrack_parent_mongo_id = self.ftrack_avalon_mapper[
+ ftrack_parent_id
+ ]
+
+ # if parent is project
+ if (ftrack_parent_mongo_id == avalon_parent_id) or (
+ ftrack_parent_id == self.ft_project_id and
+ avalon_parent_id is None
+ ):
+ parent_check = True
+
+ # check name
+ ftrack_name = ftrack_ent_dict["name"]
+ avalon_name = avalon_entity["name"]
+ name_check = ftrack_name == avalon_name
+
+ # IDEAL STATE: both parent and name check passed
+ if parent_check and name_check:
+ continue
+
+ # If entity is changeable then change values of parent or name
+ if self.changeability_by_mongo_id[avalon_id]:
+ # TODO logging
+ if not parent_check:
+ if ftrack_parent_mongo_id == str(self.avalon_project_id):
+ new_parent_name = self.entities_dict[
+ self.ft_project_id]["name"]
+ new_parent_id = None
+ else:
+ new_parent_name = self.avalon_ents_by_id[
+ ftrack_parent_mongo_id]["name"]
+ new_parent_id = ObjectId(ftrack_parent_mongo_id)
+
+ if avalon_parent_id == str(self.avalon_project_id):
+ old_parent_name = self.entities_dict[
+ self.ft_project_id]["name"]
+ else:
+ old_parent_name = self.avalon_ents_by_id[
+ ftrack_parent_mongo_id]["name"]
+
+ self.updates[avalon_id]["data"] = {
+ "visualParent": new_parent_id
+ }
+ ignore_keys[ftrack_id].append("data.visualParent")
+ self.log.debug((
+ "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\""
+ ).format(avalon_name, old_parent_name, new_parent_name))
+
+ if not name_check:
+ self.updates[avalon_id]["name"] = ftrack_name
+ ignore_keys[ftrack_id].append("name")
+ self.log.debug(
+ "Avalon entity \"{}\" was renamed to \"{}\"".format(
+ avalon_name, ftrack_name
+ )
+ )
+ continue
+
+ # parents and hierarchy must be recalculated
+ hierarchy_changing_ids.append(ftrack_id)
+
+ # Parent is project if avalon_parent_id is set to None
+ if avalon_parent_id is None:
+ avalon_parent_id = str(self.avalon_project_id)
+
+ if not name_check:
+ ent_path = self.get_ent_path(ftrack_id)
+ # TODO report
+ # TODO logging
+ self.entities_dict[ftrack_id]["name"] = avalon_name
+ self.entities_dict[ftrack_id]["entity"]["name"] = (
+ avalon_name
+ )
+ self.entities_dict[ftrack_id]["final_entity"]["name"] = (
+ avalon_name
+ )
+ self.log.warning("Name was changed back to {} <{}>".format(
+ avalon_name, ent_path
+ ))
+ self._ent_paths_by_ftrack_id.pop(ftrack_id, None)
+ msg = (
+ " It is not possible to change"
+ " the name of an entity or it's parents, "
+ " if it already contained published data."
+ )
+ self.report_items["warning"][msg].append(ent_path)
+
+ # skip parent oricessing if hierarchy didn't change
+ if parent_check:
+ continue
+
+ # Logic when parenting(hierarchy) has changed and should not
+ old_ftrack_parent_id = self.avalon_ftrack_mapper.get(
+ avalon_parent_id
+ )
+
+ # If last ftrack parent id from mongo entity exist then just
+ # remap paren_id on entity
+ if old_ftrack_parent_id:
+ # TODO report
+ # TODO logging
+ ent_path = self.get_ent_path(ftrack_id)
+ msg = (
+ " It is not possible"
+ " to change the hierarchy of an entity or it's parents,"
+ " if it already contained published data."
+ )
+ self.report_items["warning"][msg].append(ent_path)
+ self.log.warning((
+ " Entity contains published data so it was moved"
+ " back to it's original hierarchy <{}>"
+ ).format(ent_path))
+ self.entities_dict[ftrack_id]["entity"]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[ftrack_id]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[old_ftrack_parent_id][
+ "children"
+ ].append(ftrack_id)
+
+ continue
+
+ old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id)
+ if not old_parent_ent:
+ old_parent_ent = self.avalon_archived_by_id.get(
+ avalon_parent_id
+ )
+
+ # TODO report
+ # TODO logging
+ if not old_parent_ent:
+ self.log.warning((
+ "Parent entity was not found by id"
+ " - Trying to find by parent name"
+ ))
+ ent_path = self.get_ent_path(ftrack_id)
+
+ parents = avalon_entity["data"]["parents"]
+ parent_name = parents[-1]
+ matching_entity_id = None
+ for id, entity_dict in self.entities_dict.items():
+ if entity_dict["name"] == parent_name:
+ matching_entity_id = id
+ break
+
+ if matching_entity_id is None:
+ # TODO logging
+ # TODO report (turn off auto-sync?)
+ self.log.error((
+ "The entity contains published data but it was moved"
+ " to a different place in the hierarchy and it's"
+ " previous parent cannot be found."
+ " It's impossible to solve this programmatically <{}>"
+ ).format(ent_path))
+ msg = (
+ " Hierarchy of an entity"
+ " can't be changed due to published data and missing"
+ " previous parent"
+ )
+ self.report_items["error"][msg].append(ent_path)
+ self.filter_with_children(ftrack_id)
+ continue
+
+ matching_ent_dict = self.entities_dict.get(matching_entity_id)
+ match_ent_parents = matching_ent_dict.get(
+ "final_entity", {}).get(
+ "data", {}).get(
+ "parents", ["__NOTSET__"]
+ )
+ # TODO logging
+ # TODO report
+ if (
+ len(match_ent_parents) >= len(parents) or
+ match_ent_parents[:-1] != parents
+ ):
+ ent_path = self.get_ent_path(ftrack_id)
+ self.log.error((
+ "The entity contains published data but it was moved"
+ " to a different place in the hierarchy and it's"
+ " previous parents were moved too."
+ " It's impossible to solve this programmatically <{}>"
+ ).format(ent_path))
+ msg = (
+ " Hierarchy of an entity"
+ " can't be changed due to published data and scrambled"
+ "hierarchy"
+ )
+ continue
+
+ old_parent_ent = matching_ent_dict["final_entity"]
+
+ parent_id = self.ft_project_id
+ entities_to_create = []
+ # TODO logging
+ self.log.warning(
+ "Ftrack entities must be recreated because they were deleted,"
+ " but they contain published data."
+ )
+
+ _avalon_ent = old_parent_ent
+
+ self.updates[avalon_parent_id] = {"type": "asset"}
+ success = True
+ while True:
+ _vis_par = _avalon_ent["data"]["visualParent"]
+ _name = _avalon_ent["name"]
+ if _name in self.all_ftrack_names:
+ av_ent_path_items = _avalon_ent["data"]["parents"]
+ av_ent_path_items.append(_name)
+ av_ent_path = "/".join(av_ent_path_items)
+ # TODO report
+ # TODO logging
+ self.log.error((
+ "Can't recreate the entity in Ftrack because an entity"
+ " with the same name already exists in a different"
+ " place in the hierarchy <{}>"
+ ).format(av_ent_path))
+ msg = (
+ " Hierarchy of an entity"
+ " can't be changed. I contains published data and it's"
+ " previous parent had a name, that is duplicated at a "
+ " different hierarchy level"
+ )
+ self.report_items["error"][msg].append(av_ent_path)
+ self.filter_with_children(ftrack_id)
+ success = False
+ break
+
+ entities_to_create.append(_avalon_ent)
+ if _vis_par is None:
+ break
+
+ _vis_par = str(_vis_par)
+ _mapped = self.avalon_ftrack_mapper.get(_vis_par)
+ if _mapped:
+ parent_id = _mapped
+ break
+
+ _avalon_ent = self.avalon_ents_by_id.get(_vis_par)
+ if not _avalon_ent:
+ _avalon_ent = self.avalon_archived_by_id.get(_vis_par)
+
+ if success is False:
+ continue
+
+ new_entity_id = None
+ for av_entity in reversed(entities_to_create):
+ new_entity_id = self.create_ftrack_ent_from_avalon_ent(
+ av_entity, parent_id
+ )
+ update_queue.put(new_entity_id)
+
+ if new_entity_id:
+ ftrack_ent_dict["entity"]["parent_id"] = new_entity_id
+
+ if hierarchy_changing_ids:
+ self.reload_parents(hierarchy_changing_ids)
+
+ for ftrack_id in self.update_ftrack_ids:
+ if ftrack_id == self.ft_project_id:
+ continue
+
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+
+ avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
+ if (
+ CustAttrIdKey not in avalon_attrs or
+ avalon_attrs[CustAttrIdKey] != avalon_id
+ ):
+ configuration_id = self.entities_dict[ftrack_id][
+ "avalon_attrs_id"][CustAttrIdKey]
+
+ _entity_key = collections.OrderedDict({
+ "configuration_id": configuration_id,
+ "entity_id": ftrack_id
+ })
+
+ self.session.recorded_operations.push(
+ ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ ftrack_api.symbol.NOT_SET,
+ avalon_id
+ )
+ )
+ # check rest of data
+ data_changes = self.compare_dict(
+ self.entities_dict[ftrack_id]["final_entity"],
+ avalon_entity,
+ ignore_keys[ftrack_id]
+ )
+ if data_changes:
+ self.updates[avalon_id] = self.merge_dicts(
+ data_changes,
+ self.updates[avalon_id]
+ )
+
+ def synchronize(self):
+ self.log.debug("* Synchronization begins")
+ avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
+ if avalon_project_id:
+ self.avalon_project_id = ObjectId(avalon_project_id)
+
+ # remove filtered ftrack ids from create/update list
+ for ftrack_id in self.all_filtered_entities:
+ if ftrack_id in self.create_ftrack_ids:
+ self.create_ftrack_ids.remove(ftrack_id)
+ elif ftrack_id in self.update_ftrack_ids:
+ self.update_ftrack_ids.remove(ftrack_id)
+
+ self.log.debug("* Processing entities for archivation")
+ self.delete_entities()
+
+ self.log.debug("* Processing new entities")
+ # Create not created entities
+ for ftrack_id in self.create_ftrack_ids:
+ # CHECK it is possible that entity was already created
+ # because is parent of another entity which was processed first
+ if ftrack_id in self.ftrack_avalon_mapper:
+ continue
+ self.create_avalon_entity(ftrack_id)
+
+ if len(self.create_list) > 0:
+ self.dbcon.insert_many(self.create_list)
+
+ self.session.commit()
+
+ self.log.debug("* Processing entities for update")
+ self.prepare_changes()
+ self.update_entities()
+ self.session.commit()
+
    def create_avalon_entity(self, ftrack_id):
        """Create (or unarchive) the avalon document for a ftrack entity.

        Delegates the project to `create_avalon_project`. Otherwise builds
        the document from "final_entity", reuses a stored mongo id when it
        is valid and unused, writes the id back to the ftrack custom
        attribute when it changed, and either appends the document to
        `create_list` or - when an archived match was found - replaces the
        archived document in the database.

        Args:
            ftrack_id (str): Id of the ftrack entity to create.
        """
        if ftrack_id == self.ft_project_id:
            self.create_avalon_project()
            return

        entity_dict = self.entities_dict[ftrack_id]
        parent_ftrack_id = entity_dict["parent_id"]
        # visualParent stays None when the parent is the project itself
        avalon_parent = None
        if parent_ftrack_id != self.ft_project_id:
            avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id)
            # if not avalon_parent:
            #     self.create_avalon_entity(parent_ftrack_id)
            #     avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id]
            avalon_parent = ObjectId(avalon_parent)

        # avalon_archived_by_id avalon_archived_by_name
        current_id = (
            entity_dict["avalon_attrs"].get(CustAttrIdKey) or ""
        ).strip()
        mongo_id = current_id
        name = entity_dict["name"]

        # Check if exist archived asset in mongo - by ID
        unarchive = False
        unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name)
        if unarchive_id is not None:
            unarchive = True
            mongo_id = unarchive_id

        item = entity_dict["final_entity"]
        # Reuse the stored id when valid; generate a fresh one when it is
        # invalid or already mapped to another ftrack entity
        try:
            new_id = ObjectId(mongo_id)
            if mongo_id in self.avalon_ftrack_mapper:
                new_id = ObjectId()
        except InvalidId:
            new_id = ObjectId()

        item["_id"] = new_id
        item["parent"] = self.avalon_project_id
        item["schema"] = EntitySchemas["asset"]
        item["data"]["visualParent"] = avalon_parent

        new_id_str = str(new_id)
        self.ftrack_avalon_mapper[ftrack_id] = new_id_str
        self.avalon_ftrack_mapper[new_id_str] = ftrack_id

        self._avalon_ents_by_id[new_id_str] = item
        self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str
        self._avalon_ents_by_name[item["name"]] = new_id_str

        if current_id != new_id_str:
            # store mongo id to ftrack entity
            configuration_id = self.entities_dict[ftrack_id][
                "avalon_attrs_id"
            ][CustAttrIdKey]
            _entity_key = collections.OrderedDict({
                "configuration_id": configuration_id,
                "entity_id": ftrack_id
            })
            self.session.recorded_operations.push(
                ftrack_api.operation.UpdateEntityOperation(
                    "ContextCustomAttributeValue",
                    _entity_key,
                    "value",
                    ftrack_api.symbol.NOT_SET,
                    new_id_str
                )
            )

        if unarchive is False:
            self.create_list.append(item)
            return
        # If unarchive then replace entity data in database
        self.dbcon.replace_one({"_id": new_id}, item)
        self.remove_from_archived(mongo_id)
        av_ent_path_items = item["data"]["parents"]
        av_ent_path_items.append(item["name"])
        av_ent_path = "/".join(av_ent_path_items)
        self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
+
    def check_unarchivation(self, ftrack_id, mongo_id, name):
        """Decide whether an archived avalon entity should be revived.

        Looks up archived entities first by `mongo_id`, then by `name`.
        Returns the mongo id (string) of the archived entity to unarchive,
        or None when nothing matches.

        Args:
            ftrack_id (str): Ftrack id of the entity being created.
            mongo_id (str): Mongo id stored in the ftrack custom attribute.
            name (str): Entity name used for the by-name lookup.
        """
        archived_by_id = self.avalon_archived_by_id.get(mongo_id)
        archived_by_name = self.avalon_archived_by_name.get(name)

        # if not found in archived then skip
        if not archived_by_id and not archived_by_name:
            return None

        entity_dict = self.entities_dict[ftrack_id]

        if archived_by_id:
            # if is changeable then unarchive (nothing to check here)
            if self.changeability_by_mongo_id[mongo_id]:
                return mongo_id

            # TODO replace `__NOTSET__` with custom None constant
            archived_parent_id = archived_by_id["data"].get(
                "visualParent", "__NOTSET__"
            )
            archived_parents = archived_by_id["data"].get("parents")
            archived_name = archived_by_id["name"]

            # Unchangeable entity may only be revived in its exact old place
            if (
                archived_name != entity_dict["name"] or
                archived_parents != entity_dict["final_entity"]["data"][
                    "parents"
                ]
            ):
                return None

            return mongo_id

        # First check if there is any that have same parents
        for archived in archived_by_name:
            mongo_id = str(archived["_id"])
            archived_parents = archived.get("data", {}).get("parents")
            if (
                archived_parents == entity_dict["final_entity"]["data"][
                    "parents"
                ]
            ):
                return mongo_id

        # Secondly try to find more close to current ftrack entity
        first_changeable = None
        for archived in archived_by_name:
            mongo_id = str(archived["_id"])
            if not self.changeability_by_mongo_id[mongo_id]:
                continue

            if first_changeable is None:
                first_changeable = mongo_id

            ftrack_parent_id = entity_dict["parent_id"]
            map_ftrack_parent_id = self.ftrack_avalon_mapper.get(
                ftrack_parent_id
            )

            # TODO replace `__NOTSET__` with custom None constant
            archived_parent_id = archived.get("data", {}).get(
                "visualParent", "__NOTSET__"
            )
            if archived_parent_id is not None:
                archived_parent_id = str(archived_parent_id)

            # skip if parent is archived - How this should be possible?
            parent_entity = self.avalon_ents_by_id.get(archived_parent_id)
            if (
                parent_entity and (
                    map_ftrack_parent_id is not None and
                    map_ftrack_parent_id == str(parent_entity["_id"])
                )
            ):
                return mongo_id
        # Last return first changeable with same name (or None)
        return first_changeable
+
+ def create_avalon_project(self):
+ project_item = self.entities_dict[self.ft_project_id]["final_entity"]
+ mongo_id = (
+ self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ CustAttrIdKey
+ ) or ""
+ ).strip()
+
+ try:
+ new_id = ObjectId(mongo_id)
+ except InvalidId:
+ new_id = ObjectId()
+
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ project_item["_id"] = new_id
+ project_item["parent"] = None
+ project_item["schema"] = EntitySchemas["project"]
+ project_item["config"]["schema"] = EntitySchemas["config"]
+ project_item["config"]["template"] = (
+ get_avalon_project_template(project_name)
+ )
+
+ self.ftrack_avalon_mapper[self.ft_project_id] = new_id
+ self.avalon_ftrack_mapper[new_id] = self.ft_project_id
+
+ self.avalon_project_id = new_id
+
+ self._avalon_ents_by_id[str(new_id)] = project_item
+ self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
+ self._avalon_ents_by_name[project_item["name"]] = str(new_id)
+
+ self.create_list.append(project_item)
+
+ # store mongo id to ftrack entity
+ entity = self.entities_dict[self.ft_project_id]["entity"]
+ entity["custom_attributes"][CustAttrIdKey] = str(new_id)
+
+ def _bubble_changeability(self, unchangeable_ids):
+ unchangeable_queue = queue.Queue()
+ for entity_id in unchangeable_ids:
+ unchangeable_queue.put((entity_id, False))
+
+ processed_parents_ids = []
+ subsets_to_remove = []
+ while not unchangeable_queue.empty():
+ entity_id, child_is_archived = unchangeable_queue.get()
+ # skip if already processed
+ if entity_id in processed_parents_ids:
+ continue
+
+ entity = self.avalon_ents_by_id.get(entity_id)
+ # if entity is not archived but unchageable child was then skip
+ # - archived entities should not affect not archived?
+ if entity and child_is_archived:
+ continue
+
+ # set changeability of current entity to False
+ self._changeability_by_mongo_id[entity_id] = False
+ processed_parents_ids.append(entity_id)
+ # if not entity then is probably archived
+ if not entity:
+ entity = self.avalon_archived_by_id.get(entity_id)
+ child_is_archived = True
+
+ if not entity:
+ # if entity is not found then it is subset without parent
+ if entity_id in unchangeable_ids:
+ subsets_to_remove.append(entity_id)
+ else:
+ # TODO logging - What is happening here?
+ self.log.warning((
+ "Avalon contains entities without valid parents that"
+ " lead to Project (should not cause errors)"
+ " - MongoId <{}>"
+ ).format(str(entity_id)))
+ continue
+
+ # skip if parent is project
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is None:
+ continue
+ unchangeable_queue.put((str(parent_id), child_is_archived))
+
+ self._delete_subsets_without_asset(subsets_to_remove)
+
+ def _delete_subsets_without_asset(self, not_existing_parents):
+ subset_ids = []
+ version_ids = []
+ repre_ids = []
+ to_delete = []
+
+ for parent_id in not_existing_parents:
+ subsets = self.subsets_by_parent_id.get(parent_id)
+ if not subsets:
+ continue
+ for subset in subsets:
+ if subset.get("type") != "subset":
+ continue
+ subset_ids.append(subset["_id"])
+
+ db_subsets = self.dbcon.find({
+ "_id": {"$in": subset_ids},
+ "type": "subset"
+ })
+ if not db_subsets:
+ return
+
+ db_versions = self.dbcon.find({
+ "parent": {"$in": subset_ids},
+ "type": "version"
+ })
+ if db_versions:
+ version_ids = [ver["_id"] for ver in db_versions]
+
+ db_repres = self.dbcon.find({
+ "parent": {"$in": version_ids},
+ "type": "representation"
+ })
+ if db_repres:
+ repre_ids = [repre["_id"] for repre in db_repres]
+
+ to_delete.extend(subset_ids)
+ to_delete.extend(version_ids)
+ to_delete.extend(repre_ids)
+
+ self.dbcon.delete_many({"_id": {"$in": to_delete}})
+
+ # Probably deprecated
+ def _check_changeability(self, parent_id=None):
+ for entity in self.avalon_ents_by_parent_id[parent_id]:
+ mongo_id = str(entity["_id"])
+ is_changeable = self._changeability_by_mongo_id.get(mongo_id)
+ if is_changeable is not None:
+ continue
+
+ self._check_changeability(mongo_id)
+ is_changeable = True
+ for child in self.avalon_ents_by_parent_id[parent_id]:
+ if not self._changeability_by_mongo_id[str(child["_id"])]:
+ is_changeable = False
+ break
+
+ if is_changeable is True:
+ is_changeable = (mongo_id in self.subsets_by_parent_id)
+ self._changeability_by_mongo_id[mongo_id] = is_changeable
+
+ def update_entities(self):
+ mongo_changes_bulk = []
+ for mongo_id, changes in self.updates.items():
+ filter = {"_id": ObjectId(mongo_id)}
+ change_data = from_dict_to_set(changes)
+ mongo_changes_bulk.append(UpdateOne(filter, change_data))
+
+ if not mongo_changes_bulk:
+ # TODO LOG
+ return
+ self.dbcon.bulk_write(mongo_changes_bulk)
+
+ def reload_parents(self, hierarchy_changing_ids):
+ parents_queue = queue.Queue()
+ parents_queue.put((self.ft_project_id, [], False))
+ while not parents_queue.empty():
+ ftrack_id, parent_parents, changed = parents_queue.get()
+ _parents = parent_parents.copy()
+ if ftrack_id not in hierarchy_changing_ids and not changed:
+ if ftrack_id != self.ft_project_id:
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+ continue
+
+ changed = True
+ parents = [par for par in _parents]
+ hierarchy = "/".join(parents)
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["parents"] = parents
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["hierarchy"] = hierarchy
+
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+
+ if ftrack_id in self.create_ftrack_ids:
+ mongo_id = self.ftrack_avalon_mapper[ftrack_id]
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+ self.updates[mongo_id]["data"]["parents"] = parents
+ self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
+
+ def prepare_project_changes(self):
+ ftrack_ent_dict = self.entities_dict[self.ft_project_id]
+ ftrack_entity = ftrack_ent_dict["entity"]
+ avalon_code = self.avalon_project["data"]["code"]
+ # TODO Is possible to sync if full name was changed?
+ # if ftrack_ent_dict["name"] != self.avalon_project["name"]:
+ # ftrack_entity["full_name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["final_entity"][
+ # "name"
+ # ] = avalon_name
+
+ # TODO logging
+ # TODO report
+ # TODO May this happen? Is possible to change project code?
+ if ftrack_entity["name"] != avalon_code:
+ ftrack_entity["name"] = avalon_code
+ self.entities_dict[self.ft_project_id]["final_entity"]["data"][
+ "code"
+ ] = avalon_code
+ self.session.commit()
+ sub_msg = (
+ "Project code was changed back to \"{}\"".format(avalon_code)
+ )
+ msg = (
+ "It is not possible to change"
+ " project code after synchronization"
+ )
+ self.report_items["warning"][msg] = sub_msg
+ self.log.warning(sub_msg)
+
+ return self.compare_dict(
+ self.entities_dict[self.ft_project_id]["final_entity"],
+ self.avalon_project
+ )
+
+ def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
+ # _ignore_keys may be used for keys nested dict like"data.visualParent"
+ changes = {}
+ ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) == 1:
+ ignore_keys.append(key_items[0])
+
+ for key, value in dict_new.items():
+ if key in ignore_keys:
+ continue
+
+ if key not in dict_old:
+ changes[key] = value
+ continue
+
+ if isinstance(value, dict):
+ if not isinstance(dict_old[key], dict):
+ changes[key] = value
+ continue
+
+ _new_ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) <= 1:
+ continue
+ _new_ignore_keys.append(".".join(key_items[1:]))
+
+ _changes = self.compare_dict(
+ value, dict_old[key], _new_ignore_keys
+ )
+ if _changes:
+ changes[key] = _changes
+ continue
+
+ if value != dict_old[key]:
+ changes[key] = value
+
+ return changes
+
+ def merge_dicts(self, dict_new, dict_old):
+ for key, value in dict_new.items():
+ if key not in dict_old:
+ dict_old[key] = value
+ continue
+
+ if isinstance(value, dict):
+ dict_old[key] = self.merge_dicts(value, dict_old[key])
+ continue
+
+ dict_old[key] = value
+
+ return dict_old
+
+ def delete_entities(self):
+ if not self.deleted_entities:
+ return
+ # Try to order so child is not processed before parent
+ deleted_entities = []
+ _deleted_entities = [id for id in self.deleted_entities]
+
+ while True:
+ if not _deleted_entities:
+ break
+ _ready = []
+ for mongo_id in _deleted_entities:
+ ent = self.avalon_ents_by_id[mongo_id]
+ vis_par = ent["data"]["visualParent"]
+ if (
+ vis_par is not None and
+ str(vis_par) in self.deleted_entities
+ ):
+ continue
+ _ready.append(mongo_id)
+
+ for id in _ready:
+ deleted_entities.append(id)
+ _deleted_entities.remove(id)
+
+ delete_ids = []
+ for mongo_id in deleted_entities:
+ # delete if they are deletable
+ if self.changeability_by_mongo_id[mongo_id]:
+ delete_ids.append(ObjectId(mongo_id))
+ continue
+
+ # check if any new created entity match same entity
+ # - name and parents must match
+ deleted_entity = self.avalon_ents_by_id[mongo_id]
+ name = deleted_entity["name"]
+ parents = deleted_entity["data"]["parents"]
+ similar_ent_id = None
+ for ftrack_id in self.create_ftrack_ids:
+ _ent_final = self.entities_dict[ftrack_id]["final_entity"]
+ if _ent_final["name"] != name:
+ continue
+ if _ent_final["data"]["parents"] != parents:
+ continue
+
+ # If in create is "same" then we can "archive" current
+ # since will be unarchived in create method
+ similar_ent_id = ftrack_id
+ break
+
+ # If similar entity(same name and parents) is in create
+ # entities list then just change from create to update
+ if similar_ent_id is not None:
+ self.create_ftrack_ids.remove(similar_ent_id)
+ self.update_ftrack_ids.append(similar_ent_id)
+ self.avalon_ftrack_mapper[mongo_id] = similar_ent_id
+ self.ftrack_avalon_mapper[similar_ent_id] = mongo_id
+ continue
+
+ found_by_name_id = None
+ for ftrack_id, ent_dict in self.entities_dict.items():
+ if not ent_dict.get("name"):
+ continue
+
+ if name == ent_dict["name"]:
+ found_by_name_id = ftrack_id
+ break
+
+ if found_by_name_id is not None:
+ # * THESE conditins are too complex to implement in first stage
+ # - probably not possible to solve if this happen
+ # if found_by_name_id in self.create_ftrack_ids:
+ # # reparent entity of the new one create?
+ # pass
+ #
+ # elif found_by_name_id in self.update_ftrack_ids:
+ # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id]
+ #
+ # ent_dict = self.entities_dict[found_by_name_id]
+
+ # TODO report - CRITICAL entity with same name alread exists in
+ # different hierarchy - can't recreate entity
+ continue
+
+ _vis_parent = str(deleted_entity["data"]["visualParent"])
+ if _vis_parent is None:
+ _vis_parent = self.avalon_project_id
+ ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent]
+ self.create_ftrack_ent_from_avalon_ent(
+ deleted_entity, ftrack_parent_id
+ )
+
+ filter = {"_id": {"$in": delete_ids}, "type": "asset"}
+ self.dbcon.update_many(filter, {"$set": {"type": "archived_asset"}})
+
+ def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id):
+ new_entity = None
+ parent_entity = self.entities_dict[parent_id]["entity"]
+
+ _name = av_entity["name"]
+ _type = av_entity["data"].get("entityType", "folder")
+
+ self.log.debug((
+ "Re-ceating deleted entity {} <{}>"
+ ).format(_name, _type))
+
+ new_entity = self.session.create(_type, {
+ "name": _name,
+ "parent": parent_entity
+ })
+
+ final_entity = {}
+ for k, v in av_entity.items():
+ final_entity[k] = v
+
+ if final_entity.get("type") != "asset":
+ final_entity["type"] = "asset"
+
+ new_entity_id = new_entity["id"]
+ new_entity_data = {
+ "entity": new_entity,
+ "parent_id": parent_id,
+ "entity_type": _type.lower(),
+ "entity_type_orig": _type,
+ "name": _name,
+ "final_entity": final_entity
+ }
+ for k, v in new_entity_data.items():
+ self.entities_dict[new_entity_id][k] = v
+
+ p_chilren = self.entities_dict[parent_id]["children"]
+ if new_entity_id not in p_chilren:
+ self.entities_dict[parent_id]["children"].append(new_entity_id)
+
+ cust_attr, hier_attrs = get_avalon_attr(self.session)
+ for _attr in cust_attr:
+ key = _attr["key"]
+ if key not in av_entity["data"]:
+ continue
+
+ if key not in new_entity["custom_attributes"]:
+ continue
+
+ value = av_entity["data"][key]
+ if not value:
+ continue
+
+ new_entity["custom_attributes"][key] = value
+
+ av_entity_id = str(av_entity["_id"])
+ new_entity["custom_attributes"][CustAttrIdKey] = av_entity_id
+
+ self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
+ self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
+
+ self.session.commit()
+
+ ent_path = self.get_ent_path(new_entity_id)
+ msg = (
+ "Deleted entity was recreated because it or its children"
+ " contain published data"
+ )
+
+ self.report_items["info"][msg].append(ent_path)
+
+ return new_entity_id
+
+ def regex_duplicate_interface(self):
+ items = []
+ if self.failed_regex or self.tasks_failed_regex:
+ subtitle = "Entity names contain prohibited symbols:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: You can use Letters( a-Z ),"
+ " Numbers( 0-9 ) and Underscore( _ )
"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '{}\n'.format("\n".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+ for name, ids in self.tasks_failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## Task: {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ ent_path = "/".join([ent_path, name])
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '{}\n'.format("\n".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+ self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
+
+ if self.duplicates:
+ subtitle = "Duplicated entity names:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: It is not allowed to use the same name"
+ " for multiple entities in the same project
"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.duplicates.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '{}\n'.format("\n".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ", ".join(paths)))
+
+ self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
+
+ return items
+
+ def report(self):
+ items = []
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ title = "Synchronization report ({}):".format(project_name)
+
+ keys = ["error", "warning", "info"]
+ for key in keys:
+ subitems = []
+ if key == "warning":
+ for _item in self.regex_duplicate_interface():
+ subitems.append(_item)
+
+ for msg, _items in self.report_items[key].items():
+ if not _items:
+ continue
+
+ subitems.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+ if isinstance(_items, str):
+ _items = [_items]
+ subitems.append({
+ "type": "label",
+                    "value": '{}\n'.format("\n".join(_items))
+ })
+
+ if items and subitems:
+ items.append(self.report_splitter)
+
+ items.extend(subitems)
+
+ return {
+ "items": items,
+ "title": title,
+ "success": False,
+ "message": "Synchronization Finished"
+ }
diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py
index 40294da230..4b57452961 100644
--- a/pype/ftrack/lib/ftrack_base_handler.py
+++ b/pype/ftrack/lib/ftrack_base_handler.py
@@ -2,7 +2,6 @@ import functools
import time
from pypeapp import Logger
import ftrack_api
-from ftrack_api import session as fa_session
from pype.ftrack.ftrack_server import session_processor
@@ -243,7 +242,7 @@ class BaseHandler(object):
_entities is None or
_entities[0].get(
'link', None
- ) == fa_session.ftrack_api.symbol.NOT_SET
+ ) == ftrack_api.symbol.NOT_SET
):
_entities = self._get_entities(event)
@@ -447,7 +446,7 @@ class BaseHandler(object):
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
self.session.event_hub.publish(
- fa_session.ftrack_api.event.base.Event(
+ ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='message',
@@ -495,8 +494,8 @@ class BaseHandler(object):
if not user:
raise TypeError((
- 'Ftrack user with {} "{}" was not found!'.format(key, value)
- ))
+ 'Ftrack user with {} "{}" was not found!'
+ ).format(key, value))
user_id = user['id']
@@ -505,7 +504,7 @@ class BaseHandler(object):
).format(user_id)
self.session.event_hub.publish(
- fa_session.ftrack_api.event.base.Event(
+ ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='widget',
@@ -533,7 +532,7 @@ class BaseHandler(object):
else:
first = False
-            subtitle = {'type': 'label', 'value':'{}\n'.format(key)}
+            subtitle = {'type': 'label', 'value': '{}\n'.format(key)}
items.append(subtitle)
if isinstance(value, list):
for item in value:
@@ -593,7 +592,7 @@ class BaseHandler(object):
# Create and trigger event
session.event_hub.publish(
- fa_session.ftrack_api.event.base.Event(
+ ftrack_api.event.base.Event(
topic=topic,
data=_event_data,
source=dict(user=_user_data)
@@ -614,7 +613,7 @@ class BaseHandler(object):
if not source and event:
source = event.get("source")
# Create and trigger event
- event = fa_session.ftrack_api.event.base.Event(
+ event = ftrack_api.event.base.Event(
topic=topic,
data=event_data,
source=source
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
index f504a52f9e..383ed0098b 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
@@ -28,7 +28,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'plate': 'img',
'audio': 'audio',
'workfile': 'scene',
- 'animation': 'cache'
+ 'animation': 'cache',
+ 'image': 'img'
}
def process(self, instance):
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index fce6b0b5c7..5a00437a6f 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -70,7 +70,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"audio",
"yetiRig",
"yeticache",
- "source"
+ "source",
+ "matchmove",
+ "image"
]
exclude_families = ["clip"]
diff --git a/pype/plugins/maya/load/load_matchmove.py b/pype/plugins/maya/load/load_matchmove.py
new file mode 100644
index 0000000000..abc702cde8
--- /dev/null
+++ b/pype/plugins/maya/load/load_matchmove.py
@@ -0,0 +1,30 @@
+from avalon import api
+from maya import mel
+
+
+class MatchmoveLoader(api.Loader):
+ """
+ This will run matchmove script to create track in scene.
+
+ Supported script types are .py and .mel
+ """
+
+ families = ["matchmove"]
+ representations = ["py", "mel"]
+ defaults = ["Camera", "Object", "Mocap"]
+
+ label = "Run matchmove script"
+ icon = "empire"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+ if self.fname.lower().endswith(".py"):
+ exec(open(self.fname).read())
+
+ elif self.fname.lower().endswith(".mel"):
+ mel.eval('source "{}"'.format(self.fname))
+
+ else:
+ self.log.error("Unsupported script type")
+
+ return True
diff --git a/pype/plugins/nuke/load/load_matchmove.py b/pype/plugins/nuke/load/load_matchmove.py
new file mode 100644
index 0000000000..6a674368fb
--- /dev/null
+++ b/pype/plugins/nuke/load/load_matchmove.py
@@ -0,0 +1,24 @@
+from avalon import api
+
+
+class MatchmoveLoader(api.Loader):
+ """
+ This will run matchmove script to create track in script.
+ """
+
+ families = ["matchmove"]
+ representations = ["py"]
+ defaults = ["Camera", "Object"]
+
+ label = "Run matchmove script"
+ icon = "empire"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+ if self.fname.lower().endswith(".py"):
+ exec(open(self.fname).read())
+
+ else:
+ self.log.error("Unsupported script type")
+
+ return True
diff --git a/pype/plugins/standalonepublisher/publish/collect_context.py b/pype/plugins/standalonepublisher/publish/collect_context.py
index 43e2350be4..327b99f432 100644
--- a/pype/plugins/standalonepublisher/publish/collect_context.py
+++ b/pype/plugins/standalonepublisher/publish/collect_context.py
@@ -45,66 +45,71 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
with open(input_json_path, "r") as f:
in_data = json.load(f)
- asset_name = in_data['asset']
- family_preset_key = in_data.get('family_preset_key', '')
- family = in_data['family']
- subset = in_data['subset']
+ asset_name = in_data["asset"]
+ family_preset_key = in_data.get("family_preset_key", "")
+ family = in_data["family"]
+ subset = in_data["subset"]
# Load presets
presets = context.data.get("presets")
if not presets:
from pypeapp import config
+
presets = config.get_presets()
# Get from presets anatomy key that will be used for getting template
# - default integrate new is used if not set
- anatomy_key = presets.get(
- "standalone_publish", {}).get(
- "families", {}).get(
- family_preset_key, {}).get(
- "anatomy_template"
+ anatomy_key = (
+ presets.get("standalone_publish", {})
+ .get("families", {})
+ .get(family_preset_key, {})
+ .get("anatomy_template")
)
- project = io.find_one({'type': 'project'})
- asset = io.find_one({
- 'type': 'asset',
- 'name': asset_name
- })
- context.data['project'] = project
- context.data['asset'] = asset
+ project = io.find_one({"type": "project"})
+ asset = io.find_one({"type": "asset", "name": asset_name})
+ context.data["project"] = project
+ context.data["asset"] = asset
instance = context.create_instance(subset)
- instance.data.update({
- "subset": subset,
- "asset": asset_name,
- "label": subset,
- "name": subset,
- "family": family,
- "frameStart": in_data.get("representations", [None])[0].get("frameStart", None),
- "frameEnd": in_data.get("representations", [None])[0].get("frameEnd", None),
- "families": [family, 'ftrack'],
- })
+ instance.data.update(
+ {
+ "subset": subset,
+ "asset": asset_name,
+ "label": subset,
+ "name": subset,
+ "family": family,
+ "version": in_data.get("version", 1),
+ "frameStart": in_data.get("representations", [None])[0].get(
+ "frameStart", None
+ ),
+ "frameEnd": in_data.get("representations", [None])[0].get(
+ "frameEnd", None
+ ),
+ "families": [family, "ftrack"],
+ }
+ )
self.log.info("collected instance: {}".format(instance.data))
self.log.info("parsing data: {}".format(in_data))
- instance.data['destination_list'] = list()
- instance.data['representations'] = list()
- instance.data['source'] = 'standalone publisher'
+ instance.data["destination_list"] = list()
+ instance.data["representations"] = list()
+ instance.data["source"] = "standalone publisher"
- for component in in_data['representations']:
+ for component in in_data["representations"]:
- component['destination'] = component['files']
- component['stagingDir'] = component['stagingDir']
+ component["destination"] = component["files"]
+ component["stagingDir"] = component["stagingDir"]
# Do not set anatomy_template if not specified
if anatomy_key:
- component['anatomy_template'] = anatomy_key
- if isinstance(component['files'], list):
- collections, remainder = clique.assemble(component['files'])
+ component["anatomy_template"] = anatomy_key
+ if isinstance(component["files"], list):
+ collections, remainder = clique.assemble(component["files"])
self.log.debug("collecting sequence: {}".format(collections))
instance.data["frameStart"] = int(component["frameStart"])
instance.data["frameEnd"] = int(component["frameEnd"])
- instance.data['fps'] = int(component['fps'])
+ instance.data["fps"] = int(component["fps"])
if component["preview"]:
instance.data["families"].append("review")
diff --git a/pype/plugins/standalonepublisher/publish/collect_matchmove.py b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
new file mode 100644
index 0000000000..b46efc1cf3
--- /dev/null
+++ b/pype/plugins/standalonepublisher/publish/collect_matchmove.py
@@ -0,0 +1,29 @@
+"""
+Requires:
+ Nothing
+
+Provides:
+ Instance
+"""
+
+import pyblish.api
+import logging
+
+
+log = logging.getLogger("collector")
+
+
+class CollectMatchmovePublish(pyblish.api.InstancePlugin):
+ """
+ Collector with only one reason for its existence - remove 'ftrack'
+ family implicitly added by Standalone Publisher
+ """
+
+ label = "Collect Matchmove - SA Publish"
+ order = pyblish.api.CollectorOrder
+ family = ["matchmove"]
+ hosts = ["standalonepublisher"]
+
+ def process(self, instance):
+ if "ftrack" in instance.data["families"]:
+ instance.data["families"].remove("ftrack")
diff --git a/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py b/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
index 04ee88229f..9528e96ebf 100644
--- a/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
+++ b/pype/standalonepublish/widgets/model_filter_proxy_recursive_sort.py
@@ -1,4 +1,5 @@
from . import QtCore
+import re
class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):