diff --git a/pype/ftrack/actions_temp/event_sync_new.py b/pype/ftrack/actions_temp/event_sync_new.py index 01944ca0ef..17cafdfc2b 100644 --- a/pype/ftrack/actions_temp/event_sync_new.py +++ b/pype/ftrack/actions_temp/event_sync_new.py @@ -1,6 +1,7 @@ import os import collections import re +import copy import queue import time import toml @@ -43,7 +44,7 @@ class SyncToAvalonEvent(BaseEvent): entities_query_by_id = ( "select id, name, parent_id, link, custom_attributes from TypedContext" - " where project_id is \"{}\" and entity_id in ({})" + " where project_id is \"{}\" and id in ({})" ) entities_name_query_by_name = ( "select id, name from TypedContext" @@ -98,8 +99,6 @@ class SyncToAvalonEvent(BaseEvent): def avalon_ents_by_name(self): if self._avalon_ents_by_name is None: self._avalon_ents_by_name = {} - # TODO Do we need this split? - # - project should not be in avalon_ent_by_name proj, ents = self.avalon_entities for ent in ents: self._avalon_ents_by_name[ent["name"]] = ent @@ -153,6 +152,26 @@ class SyncToAvalonEvent(BaseEvent): ) return self._avalon_subsets_by_parents + @property + def avalon_archived_by_id(self): + if self._avalon_archived_by_id is None: + self._avalon_archived_by_id = {} + self.dbcon.install() + self.dbcon.Session["AVALON_PROJECT"] = ( + self.cur_project["full_name"] + ) + for asset in self.dbcon.find({"type": "archived_asset"}): + self._avalon_archived_by_id[asset["_id"]] = asset + return self._avalon_archived_by_id + + @property + def avalon_archived_by_name(self): + if self._avalon_archived_by_name is None: + self._avalon_archived_by_name = {} + for asset in self.avalon_archived_by_id.values(): + self._avalon_archived_by_name[asset["name"]] = asset + return self._avalon_archived_by_name + @property def changeability_by_mongo_id(self): """Return info about changeability of entity and it's parents.""" @@ -160,7 +179,8 @@ class SyncToAvalonEvent(BaseEvent): self._changeability_by_mongo_id = collections.defaultdict( lambda: True ) - self._changeability_by_mongo_id[self.avalon_project_id] = False + avalon_project, avalon_entities = self.avalon_entities + self._changeability_by_mongo_id[avalon_project["_id"]] = False self._bubble_changeability( list(self.avalon_subsets_by_parents.keys()) ) @@ -180,7 +200,7 @@ class SyncToAvalonEvent(BaseEvent): if self._avalon_ents is None: return - if not isinstance(values): + if not isinstance(values, (list, tuple)): values = [values] def get_found_data(entity): @@ -203,8 +223,6 @@ class SyncToAvalonEvent(BaseEvent): project, entities = self._avalon_ents key_items = key.split(".") for value in values: - # TODO add more keys (if possible) - # - this way not allow parent_id ent = None if key == "_id": if self._avalon_ents_by_id is not None: @@ -247,7 +265,10 @@ class SyncToAvalonEvent(BaseEvent): name = data["name"] entity = data["entity"] - self._avalon_ents.remove(entity) + project, ents = self._avalon_ents + ents.remove(entity) + self._avalon_ents = project, ents + if self._avalon_ents_by_ftrack_id is not None: self._avalon_ents_by_ftrack_id.pop(ftrack_id, None) @@ -260,6 +281,9 @@ class SyncToAvalonEvent(BaseEvent): if self._avalon_ents_by_name is not None: self._avalon_ents_by_name.pop(name, None) + if self._avalon_archived_by_id is not None: + self._avalon_archived_by_id[mongo_id] = entity + if mongo_id in self.task_changes_by_avalon_id: self.task_changes_by_avalon_id.pop(mongo_id) @@ -301,7 +325,6 @@ class SyncToAvalonEvent(BaseEvent): "Parent <{}> for subsets <{}> does not exist" ).format(str(entity_id), joined_subset_ids)) 
else: - # TODO logging - What is happening here? self.log.warning(( "In avalon are entities without valid parents that" " lead to Project (should not cause errors)" @@ -328,6 +351,8 @@ class SyncToAvalonEvent(BaseEvent): self._avalon_ents_by_name = None self._avalon_subsets_by_parents = None self._changeability_by_mongo_id = None + self._avalon_archived_by_id = None + self._avalon_archived_by_name = None self.task_changes_by_avalon_id = {} @@ -336,15 +361,17 @@ class SyncToAvalonEvent(BaseEvent): self.ftrack_ents_by_id = {} self.obj_id_ent_type_map = {} + self.ftrack_recreated_mapping = {} self.ftrack_added = {} self.ftrack_moved = {} self.ftrack_renamed = {} self.ftrack_updated = {} self.ftrack_removed = {} - self.hierarchy_update = [] + self.moved_in_avalon = [] self.renamed_in_avalon = [] + self.hier_cust_attrs_changes = collections.defaultdict(list) self.duplicated = [] self.regex_fail = [] @@ -352,6 +379,12 @@ class SyncToAvalonEvent(BaseEvent): self.regex_schemas = {} self.updates = collections.defaultdict(dict) + self.report_items = { + "info": collections.defaultdict(list), + "warning": collections.defaultdict(list), + "error": collections.defaultdict(list) + } + def set_process_session(self, session): try: self.process_session.close() @@ -421,7 +454,6 @@ class SyncToAvalonEvent(BaseEvent): return "/".join([ent["name"] for ent in entity["link"]]) def launch(self, session, event): - return True # Try to commit and if any error happen then recreate session try: self.process_session.commit() @@ -438,6 +470,7 @@ class SyncToAvalonEvent(BaseEvent): "move": {}, "add": {} } + self.log.debug("Starting event") entities_info = event["data"]["entities"] found_actions = set() for ent_info in entities_info: @@ -471,6 +504,9 @@ class SyncToAvalonEvent(BaseEvent): found_actions = list(found_actions) if not found_actions: + self.log.debug("There are not actions to do") + from pprint import pprint + pprint(event.__dict__) return True # Check if auto sync was turned on/off @@ -513,6 +549,7 @@ class SyncToAvalonEvent(BaseEvent): found_actions[0] == "update" and not updated ): + self.log.debug("There are not actions to process after filtering") return True ft_project = self.cur_project @@ -528,30 +565,31 @@ class SyncToAvalonEvent(BaseEvent): # Skip if auto-sync is not set auto_sync = ft_project["custom_attributes"][cust_attr_auto_sync] - if auto_sync != "1": + if auto_sync is not True: + self.log.debug("Auto sync is not turned on") return True # Get ftrack entities - find all ftrack ids first ftrack_ids = [] - for ent_info in updated: - ftrack_ids.append(ent_info["entityId"]) + for ftrack_id in updated: + ftrack_ids.append(ftrack_id) - for action, ent_infos in entities_by_action.items(): + for action, ftrack_ids in entities_by_action.items(): # skip updated (already prepared) and removed (not exist in ftrack) if action in ["update", "remove"]: continue - for ent_info in ent_infos: - ftrack_id = ent_info["entityId"] + for ftrack_id in ftrack_ids: if ftrack_id not in ftrack_ids: ftrack_ids.append(ftrack_id) - joined_ids = ", ".join(["\"{}\"".format(id) for id in ftrack_ids]) - ftrack_entities = self.process_session.query( - self.entities_query_by_id.format(ft_project["id"], joined_ids) - ).all() - for entity in ftrack_entities: - self.ftrack_ents_by_id[entity["id"]] = entity + if ftrack_ids: + joined_ids = ", ".join(["\"{}\"".format(id) for id in ftrack_ids]) + ftrack_entities = self.process_session.query( + self.entities_query_by_id.format(ft_project["id"], joined_ids) + ).all() + for entity in 
ftrack_entities: + self.ftrack_ents_by_id[entity["id"]] = entity # Filter updates where name is changing for ftrack_id, ent_info in updated.items(): @@ -560,7 +598,7 @@ class SyncToAvalonEvent(BaseEvent): if "name" not in ent_keys: continue - _ent_info = ent_info.copy() + _ent_info = copy.deepcopy(ent_info) for ent_key in ent_keys: if ent_key == "name": ent_info["changes"].pop(ent_key, None) @@ -568,6 +606,7 @@ class SyncToAvalonEvent(BaseEvent): else: _ent_info["changes"].pop(ent_key, None) _ent_info["keys"].remove(ent_key) + self.ftrack_renamed[ftrack_id] = _ent_info self.ftrack_removed = entities_by_action["remove"] @@ -575,16 +614,34 @@ class SyncToAvalonEvent(BaseEvent): self.ftrack_added = entities_by_action["add"] self.ftrack_updated = updated + self.log.debug("Main process started") + time_1 = time.time() # 1.) Process removed - may affect all other actions self.process_removed() + time_2 = time.time() # 2.) Process renamed - may affect added self.process_renamed() + time_3 = time.time() # 3.) Process added - moved entity may be moved to new entity self.process_added() + time_4 = time.time() # 4.) Process moved self.process_moved() + time_5 = time.time() # 5.) Process updated self.process_updated() + time_6 = time.time() + # 6.) Process changes in hierarchy or hier custom attribues + self.process_hier_cleanup() + time_7 = time.time() + + self.log.debug("Main process finished") + print(time_2 - time_1) + print(time_3 - time_2) + print(time_4 - time_3) + print(time_5 - time_4) + print(time_6 - time_5) + print(time_7 - time_6) return True @@ -594,9 +651,8 @@ class SyncToAvalonEvent(BaseEvent): ent_infos = self.ftrack_removed removable_ids = [] recreate_ents = [] - removed_mapping = {} removed_names = [] - for ftrack_id, removed in ent_infos: + for ftrack_id, removed in ent_infos.items(): entity_type = removed["entity_type"] parent_id = removed["parentId"] removed_name = removed["changes"]["name"]["old"] @@ -647,19 +703,132 @@ class SyncToAvalonEvent(BaseEvent): (line[1].get("data", {}).get("parents") or []) )) ) - # TODO recreate entities - # remove mapping is for mapping removed and new entities - removed_mapping["removed_ftrack_id"] = "new_ftrack_id" + + proj, ents = self.avalon_entities + for avalon_entity in recreate_ents: + old_ftrack_id = avalon_entity["data"]["ftrackId"] + vis_par = avalon_entity["data"]["visualParent"] + if vis_par is None: + vis_par = proj["_id"] + parent_ent = self.avalon_ents_by_id[vis_par] + parent_ftrack_id = parent_ent["data"]["ftrackId"] + parent_ftrack_ent = self.ftrack_ents_by_id.get( + parent_ftrack_id + ) + if not parent_ftrack_ent: + if parent_ent["type"].lower() == "project": + parent_ftrack_ent = self.cur_project + else: + parent_ftrack_ent = self.process_session.query( + self.entities_query_by_id.format( + self.cur_project["id"], parent_ftrack_id + ) + ).one() + entity_type = avalon_entity["data"]["entityType"] + new_entity = self.process_session.create(entity_type, { + "name": avalon_entity["name"], + "parent": parent_ftrack_ent + }) + try: + self.process_session.commit() + except Exception: + # TODO logging + # TODO report + self.process_session.rolback() + continue + + new_entity_id = new_entity["id"] + avalon_entity["data"]["ftrackId"] = new_entity_id + + for key, val in avalon_entity["data"].items(): + if not val: + continue + if key not in new_entity["custom_attributes"]: + continue + + new_entity["custom_attributes"][key] = val + + new_entity["custom_attributes"][cust_attr_id_key] = ( + str(avalon_entity["_id"]) + ) + try: + 
+                self.process_session.commit()
+            except Exception:
+                # TODO logging
+                # TODO report
+                self.process_session.rollback()
+                self.log.warning(
+                    "Process session commit failed!",
+                    exc_info=True
+                )
+                continue
+
+            self.ftrack_recreated_mapping[old_ftrack_id] = new_entity_id
+            self.process_session.commit()
+
+            found_idx = None
+            # _avalon_ents is cached as a (project, entities) pair
+            proj_doc, ent_docs = self._avalon_ents
+            for idx, _entity in enumerate(ent_docs):
+                if _entity["_id"] == avalon_entity["_id"]:
+                    found_idx = idx
+                    break
+
+            if found_idx is None:
+                continue
+
+            # Prepare updates dict for mongo update
+            if "data" not in self.updates[avalon_entity["_id"]]:
+                self.updates[avalon_entity["_id"]]["data"] = {}
+
+            self.updates[avalon_entity["_id"]]["data"]["ftrackId"] = (
+                new_entity_id
+            )
+            # Update cached entities
+            ent_docs[found_idx] = avalon_entity
+            self._avalon_ents = (proj_doc, ent_docs)
+
+            if self._avalon_ents_by_id is not None:
+                mongo_id = avalon_entity["_id"]
+                self._avalon_ents_by_id[mongo_id] = avalon_entity
+
+            if self._avalon_ents_by_parent_id is not None:
+                vis_par = avalon_entity["data"]["visualParent"]
+                children = self._avalon_ents_by_parent_id[vis_par]
+                found_idx = None
+                for idx, _entity in enumerate(children):
+                    if _entity["_id"] == avalon_entity["_id"]:
+                        found_idx = idx
+                        break
+                children[found_idx] = avalon_entity
+                self._avalon_ents_by_parent_id[vis_par] = children
+
+            if self._avalon_ents_by_ftrack_id is not None:
+                self._avalon_ents_by_ftrack_id.pop(old_ftrack_id)
+                self._avalon_ents_by_ftrack_id[new_entity_id] = (
+                    avalon_entity
+                )
+
+            if self._avalon_ents_by_name is not None:
+                name = avalon_entity["name"]
+                self._avalon_ents_by_name[name] = avalon_entity
+
         # Check if entities with same name can be synchronized
         if not removed_names:
             return
+        self.check_names_synchronizable(removed_names)
+
+    def check_names_synchronizable(self, names):
+        """Check if entities with specific names are importable.
+
+        This check should happen after removing or renaming an entity.
+        Once an entity was removed or renamed, its name becomes available to sync.
+ """ joined_passed_names = ", ".join( - ["\"{}\"".format(name) for name in removed_names] + ["\"{}\"".format(name) for name in names] ) same_name_entities = self.process_session.query( - self.entities_name_query_by_name.format(joined_passed_names) + self.entities_name_query_by_name.format( + self.cur_project["id"], joined_passed_names + ) ).all() if not same_name_entities: return @@ -678,13 +847,15 @@ class SyncToAvalonEvent(BaseEvent): self.log.debug(( "Name \"{}\" still have more than one entity <{}>" ).format( - name, "| ".join([self.get_ent_path(ent) for ent in ents]) + name, "| ".join( + [self.get_ent_path(ent["id"]) for ent in ents] + ) )) continue entity = ents[0] self.log.debug("Checking if can synchronize entity <{}>".format( - self.get_ent_path(entity) + self.get_ent_path(entity["id"]) )) # skip if already synchronized ftrack_id = entity["id"] @@ -709,15 +880,50 @@ class SyncToAvalonEvent(BaseEvent): key=(lambda entity: len(entity["link"])) ) + children_queue = queue.Queue() for entity in synchronizable_ents: parent_avalon_ent = self.avalon_ents_by_ftrack_id[ entity["parent_id"] ] self.create_entity_in_avalon(entity, parent_avalon_ent) + for child in entity["children"]: if child.entity_type.lower() == "task": continue - # TODO create children and children of children and children of children of children + children_queue.put(child) + + while not children_queue.empty(): + entity = children_queue.get() + ftrack_id = entity["id"] + name = entity["name"] + ent_by_ftrack_id = self.avalon_ents_by_ftrack_id.get(ftrack_id) + if ent_by_ftrack_id: + raise Exception(( + "This is bug, parent was just synchronized to avalon" + " but entity is already in database {}" + ).format(dict(entity))) + + # Entity has duplicated name with another entity + # - may be renamed: in that case renaming method will handle that + duplicate_ent = self.avalon_ents_by_name.get(name) + if duplicate_ent: + continue + + passed_regex = avalon_sync.check_regex( + name, "asset", schema_patterns=self.regex_schemas + ) + if not passed_regex: + continue + + parent_id = entity["parent_id"] + parent_avalon_ent = self.avalon_ents_by_ftrack_id[parent_id] + + self.create_entity_in_avalon(entity, parent_avalon_ent) + + for child in entity["children"]: + if child.entity_type.lower() == "task": + continue + children_queue.put(child) def create_entity_in_avalon(self, ftrack_ent, parent_avalon): proj, ents = self.avalon_entities @@ -742,9 +948,10 @@ class SyncToAvalonEvent(BaseEvent): vis_par = parent_avalon["_id"] mongo_id = ObjectId() + name = ftrack_ent["name"] final_entity = { "_id": mongo_id, - "name": ftrack_ent["name"], + "name": name, "type": "asset", "schema": entity_schemas["asset"], "parent": proj["_id"], @@ -758,18 +965,53 @@ class SyncToAvalonEvent(BaseEvent): } } cust_attrs = self.get_cust_attr_values(ftrack_ent) - for key, val in cust_attrs: + for key, val in cust_attrs.items(): final_entity["data"][key] = val - schema.validate(final_entity) - self.dbcon.insert_one(final_entity) + test_queue = queue.Queue() + test_queue.put(final_entity) + while not test_queue.empty(): + in_dict = test_queue.get() + for key, val in in_dict.items(): + if isinstance(val, dict): + test_queue.put(val) + continue + schema.validate(final_entity) + + replaced = False + archived = self.avalon_archived_by_name.get(name) + if archived: + archived_id = archived["_id"] + if ( + archived["data"]["parents"] == parents or + self.changeability_by_mongo_id[archived_id] + ): + mongo_id = archived_id + final_entity["_id"] = mongo_id + 
self.dbcon.replace_one({"_id": mongo_id}, final_entity) + replaced = True + + if not replaced: + self.dbcon.insert_one(final_entity) + + ftrack_ent["custom_attributes"][cust_attr_id_key] = str(mongo_id) + try: + self.process_session.commit() + except Exception: + self.process_session.rolback() + # TODO logging + # TODO report + + # modify cached data # Skip if self._avalon_ents is not set(maybe never happen) if self._avalon_ents is None: return final_entity if self._avalon_ents is not None: - self._avalon_ents.append(final_entity) + proj, ents = self._avalon_ents + ents.append(final_entity) + self._avalon_ents = (proj, ents) if self._avalon_ents_by_id is not None: self._avalon_ents_by_id[mongo_id] = final_entity @@ -831,7 +1073,7 @@ class SyncToAvalonEvent(BaseEvent): defaults[key] = attr["default"] hier_values = avalon_sync.get_hierarchical_attributes( - self.processing_session, entity, hier_keys, defaults + self.process_session, entity, hier_keys, defaults ) for key, val in hier_values.items(): output[key] = val @@ -879,7 +1121,12 @@ class SyncToAvalonEvent(BaseEvent): self.process_session.rollback() # TODO report # TODO logging + self.log.warning( + "Process session commit failed!", + exc_info=True + ) + old_names = [] # Process renaming in Avalon DB while not changeable_queue.empty(): ftrack_id, avalon_ent, new_name = changeable_queue.get() @@ -893,11 +1140,11 @@ class SyncToAvalonEvent(BaseEvent): passed_regex = avalon_sync.check_regex( new_name, _entity_type, schema_patterns=self.regex_schemas ) + ent_path = self.get_ent_path(ftrack_id) if not passed_regex: # TODO report # TODO logging # new name does not match regex (letters numbers and _) - ent_path = self.get_ent_path(ftrack_id) # TODO move this to special report method self.log.warning( "Entity name contain invalid symbols <{}>".format(ent_path) @@ -911,10 +1158,14 @@ class SyncToAvalonEvent(BaseEvent): old_val["name"] = new_name self._avalon_ents_by_name[new_name] = old_val self.updates[mongo_id] = {"name": new_name} - self.renamed_in_avalon.append(ftrack_id) + self.renamed_in_avalon.append(mongo_id) + + old_names.append(old_name) + if new_name in old_names: + old_names.remove(new_name) + # TODO report # TODO logging - # TODO go through children to change parents self.log.debug( "Name of entity will be changed to \"{}\" <{}>".format( new_name, ent_path @@ -945,6 +1196,9 @@ class SyncToAvalonEvent(BaseEvent): ) ) + if old_names: + self.check_names_synchronizable(old_names) + for parent_id, task_change in renamed_tasks.items(): avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id) ent_info = task_change["ent_info"] @@ -964,17 +1218,28 @@ class SyncToAvalonEvent(BaseEvent): new_name, "task", schema_patterns=self.regex_schemas ) if not passed_regex: + # TODO report + # TODO logging ftrack_id = ent_info["enityId"] - entity = self.ftrack_ents_by_id[ftrack_id] - entity["name"] = old_name - try: - self.process_session.commit() - # TODO report - # TODO logging - except Exception: - self.process_session.rollback() - # TODO report - # TODO logging + ent_path = self.get_ent_path(ftrack_id) + self.log.warning(( + "Name of renamed entity <{}> contain invalid symbols" + ).format(ent_path)) + # TODO should we rename back to previous name? 
+ # entity = self.ftrack_ents_by_id[ftrack_id] + # entity["name"] = old_name + # try: + # self.process_session.commit() + # # TODO report + # # TODO logging + # except Exception: + # self.process_session.rollback() + # # TODO report + # # TODO logging + # self.log.warning( + # "Process session commit failed!", + # exc_info=True + # ) continue @@ -984,7 +1249,8 @@ class SyncToAvalonEvent(BaseEvent): if new_name not in self.task_changes_by_avalon_id[mongo_id]: self.task_changes_by_avalon_id[mongo_id].append(new_name) - # TODO process not_found + # not_found are not processed since all not found are not found + # because they are not synchronizable def process_added(self): ent_infos = self.ftrack_added @@ -1020,7 +1286,6 @@ class SyncToAvalonEvent(BaseEvent): to_sync_by_id[ft_id] = self.ftrack_ents_by_id[ft_id] # cache regex success (for tasks) - not_found_parents = [] duplicated = [] regex_failed = [] for ftrack_id, entity in to_sync_by_id.items(): @@ -1032,119 +1297,122 @@ class SyncToAvalonEvent(BaseEvent): parent_id = entity["parent_id"] parent_avalon = self.avalon_ents_by_ftrack_id.get(parent_id) if not parent_avalon: - parent_avalon = self.process_parent_nonexistence(parent_id) - if not parent_avalon: - not_found_parents.append(ftrack_id) - continue + continue + is_synchonizable = True name = entity["name"] passed_regex = avalon_sync.check_regex( name, "asset", schema_patterns=self.regex_schemas ) if not passed_regex: regex_failed.append(ftrack_id) + is_synchonizable = False if name in self.avalon_ents_by_name: duplicated.append(ftrack_id) + is_synchonizable = False - ids_to_pop = list(set(regex_failed).union(set(duplicated))) - - # TODO go through not found parents - parent_already_exist = [] - for ftrack_id in not_found_parents: - if ftrack_id in ids_to_pop: - continue - entity = self.ftrack_ents_by_id[ftrack_id] - parent_id = entity["parent_id"] - result = self.process_parent_nonexistence(parent_id) - if not result: - ids_to_pop.append(ftrack_id) + if not is_synchonizable: continue - parent_already_exist.append(ftrack_id) + self.create_entity_in_avalon(entity, parent_avalon) - ignored_entities = {} - for ftrack_id in ids_to_pop: - ignored_entities[ftrack_id] = to_sync_by_id.pop(ftrack_id) + for parent_id, ent_infos in new_tasks_by_parent.items(): + avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id) + if not avalon_ent: + continue + mongo_id = avalon_ent["_id"] + if mongo_id not in self.task_changes_by_avalon_id: + self.task_changes_by_avalon_id[mongo_id] = ( + avalon_ent["data"]["tasks"] + ) + for ent_info in ent_infos: + new_name = ent_info["changes"]["name"]["new"] + if new_name not in self.task_changes_by_avalon_id[mongo_id]: + self.task_changes_by_avalon_id[mongo_id].append(new_name) - # joined_passed_names = ", ".join( - # ["\"{}\"".format(name) for name in all_passed_names] - # ) - # name_ftrack_ents = self.process_session.query( - # self.entities_name_query_by_name.format( - # self.cur_project["id"], joined_passed_names - # ) - # ).all() - # - # ents_by_name = collections.defaultdict(list) - # for entity in name_ftrack_ents: - # ftrack_id = entity["id"] - # if ftrack_id not in self.ftrack_ents_by_id: - # self.ftrack_ents_by_id[ftrack_id] = entity - # ents_by_name[entity["name"]].append(entity) - + # TODO process duplicates and regex failed entities def process_moved(self): if not self.ftrack_moved: return - found = {} - not_found = {} - for ftrack_id, ent_info in self.ftrack_moved.items(): + ftrack_moved = {k: v for k, v in sorted( + self.ftrack_moved.items(), + 
key=(lambda line: len( + (line[1].get("data", {}).get("parents") or []) + )) + )} + + for ftrack_id, ent_info in ftrack_moved.items(): avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) if not avalon_ent: - not_found[ftrack_id] = ent_info continue - found[ftrack_id] = { - "avalon_ent": avalon_ent, - "ent_info": ent_info, - "renamed": ftrack_id in self.ftrack_renamed - } - - # TODO process not_found (not synchronized) - - for ftrack_id, ent_dict in found.items(): - updates = {} - avalon_ent = ent_dict["avalon_ent"] - ent_info = ent_dict["ent_info"] - renamed = ent_dict["renamed"] - new_parent_id = ent_info["changes"]["parent_id"]["new"] old_parent_id = ent_info["changes"]["parent_id"]["old"] mongo_id = avalon_ent["_id"] if self.changeability_by_mongo_id[mongo_id]: - # TODO implement par_av_ent = self.avalon_ents_by_ftrack_id.get(new_parent_id) + if not par_av_ent: + # TODO logging + # TODO report + ent_path_items = [self.cur_project["full_name"]] + ent_path_items.extend(avalon_ent["data"]["parents"]) + ent_path = "/".join(ent_path_items) + self.log.warning(( + "Parent of moved entity <{}> does not exist" + ).format(ent_path)) + continue # THIS MUST HAPPEND AFTER CREATING NEW ENTITIES !!!! - updates["data.visualParent"] = par_av_ent["_id"] - if renamed: - pass + # - because may be moved to new created entity + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + self.updates[mongo_id]["data"]["visualParent"] = ( + par_av_ent["_id"] + ) + self.moved_in_avalon.append(mongo_id) - def process_updated(self, ent_infos): + else: + if old_parent_id in self.ftrack_recreated_mapping: + old_parent_id = ( + self.ftrack_recreated_mapping[old_parent_id] + ) + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + ftrack_ent["parent_id"] = old_parent_id + try: + self.process_session.commit() + except Exception: + self.process_session.rollback() + # TODO logging + # TODO report + self.log.warning( + "Process session commit failed!", + exc_info=True + ) + + def process_updated(self): # Only custom attributes changes should get here - if not ent_infos: + if not self.ftrack_updated: return - # TODO check if entity exist in mongo!!!! 
- # Try to create if not (In that case `update` does not make sence) + ent_infos = self.ftrack_updated ftrack_mongo_mapping = {} - not_found = {} not_found_ids = [] for ftrack_id, ent_info in ent_infos.items(): avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id) - if avalon_ent: - ftrack_mongo_mapping[ftrack_id] = avalon_ent["_id"] + if not avalon_ent: + not_found_ids.append(ftrack_id) continue - not_found_ids.append(ftrack_id) + + ftrack_mongo_mapping[ftrack_id] = avalon_ent["_id"] for ftrack_id in not_found_ids: - not_found[ftrack_id] = ent_infos.pop(ftrack_id) + ent_infos.pop(ftrack_id) - self.process_not_found(not_found) if not ent_infos: return @@ -1163,19 +1431,22 @@ class SyncToAvalonEvent(BaseEvent): obj_id = cust_attr["object_type_id"] cust_attrs_by_obj_id[obj_id][key] = cust_attr - for ftrack_id, ent_info in ent_infos: + hier_attrs_keys = [attr["key"] for attr in hier_attrs] + + for ftrack_id, ent_info in ent_infos.items(): mongo_id = ftrack_mongo_mapping[ftrack_id] - entType = ent_infos["entityType"] + entType = ent_info["entityType"] if entType == "show": ent_cust_attrs = cust_attrs_by_obj_id.get("show") else: - obj_type_id = ent_infos["objectTypeId"] + obj_type_id = ent_info["objectTypeId"] ent_cust_attrs = cust_attrs_by_obj_id.get(obj_type_id) - if not ent_cust_attrs: - continue - for key, values in ent_info["changes"].items(): + if key in hier_attrs_keys: + self.hier_cust_attrs_changes[key].append(ftrack_id) + continue + if key not in ent_cust_attrs: continue @@ -1184,410 +1455,305 @@ class SyncToAvalonEvent(BaseEvent): value = values["new"] self.updates[mongo_id]["data"][key] = value - def process_not_found(self, ent_infos): - raise NotImplemented("process_not_found is not Implemented yet") + def process_hier_cleanup(self): + if ( + not self.moved_in_avalon and + not self.renamed_in_avalon and + not self.hier_cust_attrs_changes and + not self.task_changes_by_avalon_id + ): + return + + cust_attrs, hier_attrs = self.avalon_cust_attrs + + parent_changes = [] + hier_cust_attrs_ids = [] + hier_cust_attrs_keys = [] + all_keys = False + for mongo_id in self.moved_in_avalon: + parent_changes.append(mongo_id) + hier_cust_attrs_ids.append(mongo_id) + all_keys = True + + for mongo_id in self.renamed_in_avalon: + if mongo_id not in parent_changes: + parent_changes.append(mongo_id) + + for key, ftrack_ids in self.hier_cust_attrs_changes.items(): + if key.startswith("avalon_"): + continue + for ftrack_id in ftrack_ids: + avalon_ent = self.avalon_ents_by_ftrack_id[ftrack_id] + mongo_id = avalon_ent["_id"] + if mongo_id in hier_cust_attrs_ids: + continue + hier_cust_attrs_ids.append(mongo_id) + if not all_keys and key not in hier_cust_attrs_keys: + hier_cust_attrs_keys.append(key) + + # Tasks preparation **** + for mongo_id, tasks in self.task_changes_by_avalon_id.items(): + avalon_ent = self.avalon_ents_by_id[mongo_id] + if avalon_ent["data"].get("tasks") == tasks: + continue + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + + self.updates[mongo_id]["data"]["tasks"] = tasks + + # Parents preparation *** + mongo_to_ftrack_parents = {} + for mongo_id in parent_changes: + avalon_ent = self.avalon_ents_by_id[mongo_id] + ftrack_id = avalon_ent["data"]["ftrackId"] + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + mongo_to_ftrack_parents[mongo_id] = len(ftrack_ent["link"]) + + stored_parents_by_mongo = {} + # sort by hierarchy level + mongo_to_ftrack_parents = [k for k, v in sorted( + mongo_to_ftrack_parents.items(), + key=(lambda item: item[1]) + )] + 
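+        # Entities are processed parents-first (sorted by hierarchy depth
+        # above) so a child can build its "parents" list from the entry
+        # already stored for its visual parent instead of resolving the
+        # full ftrack "link" again.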
for mongo_id in mongo_to_ftrack_parents: + avalon_ent = self.avalon_ents_by_id[mongo_id] + vis_par = avalon_ent["data"]["visualParent"] + if vis_par in stored_parents_by_mongo: + parents = [par for par in stored_parents_by_mongo[vis_par]] + if vis_par is not None: + parent_ent = self.avalon_ents_by_id[vis_par] + parents.append(parent_ent["name"]) + stored_parents_by_mongo[mongo_id] = parents + continue + + ftrack_id = avalon_ent["data"]["ftrackId"] + ftrack_ent = self.ftrack_ents_by_id[ftrack_id] + ent_path_items = [ent["name"] for ent in ftrack_ent["link"]] + parents = ent_path_items[1:len(ent_path_items)-1:] + stored_parents_by_mongo[mongo_id] = parents + + for mongo_id, parents in stored_parents_by_mongo.items(): + avalon_ent = self.avalon_ents_by_id[mongo_id] + cur_par = avalon_ent["data"]["parents"] + if cur_par == parents: + continue + + hierarchy = "" + if len(parents) > 0: + hierarchy = os.path.sep.join(parents) + + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + self.updates[mongo_id]["data"]["parents"] = parents + self.updates[mongo_id]["data"]["hierarchy"] = hierarchy + + # Skip custom attributes if didn't change + if not hier_cust_attrs_ids: + self.update_entities() + return + + # Hierarchical custom attributes preparation *** + if all_keys: + hier_cust_attrs_keys = [ + attr["key"] for attr in hier_attrs if ( + not attr["key"].startswith("avalon_") + ) + ] + + mongo_ftrack_mapping = {} + cust_attrs_ftrack_ids = [] + # ftrack_parenting = collections.defaultdict(list) + entities_dict = collections.defaultdict(dict) + + children_queue = queue.Queue() + parent_queue = queue.Queue() + + for mongo_id in hier_cust_attrs_ids: + avalon_ent = self.avalon_ents_by_id[mongo_id] + parent_queue.put(avalon_ent) + ftrack_id = avalon_ent["data"]["ftrackId"] + if ftrack_id not in entities_dict: + entities_dict[ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + + mongo_ftrack_mapping[mongo_id] = ftrack_id + cust_attrs_ftrack_ids.append(ftrack_id) + children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or [] + for children_ent in children_ents: + _ftrack_id = children_ent["data"]["ftrackId"] + if _ftrack_id in entities_dict: + continue + + entities_dict[_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + # if _ftrack_id not in ftrack_parenting[ftrack_id]: + # ftrack_parenting[ftrack_id].append(_ftrack_id) + entities_dict[_ftrack_id]["parent_id"] = ftrack_id + if _ftrack_id not in entities_dict[ftrack_id]["children"]: + entities_dict[ftrack_id]["children"].append(_ftrack_id) + children_queue.put(children_ent) + + while not children_queue.empty(): + avalon_ent = children_queue.get() + mongo_id = avalon_ent["_id"] + ftrack_id = avalon_ent["data"]["ftrackId"] + if ftrack_id in cust_attrs_ftrack_ids: + continue + + mongo_ftrack_mapping[mongo_id] = ftrack_id + cust_attrs_ftrack_ids.append(ftrack_id) + + children_ents = self.avalon_ents_by_parent_id.get(mongo_id) or [] + for children_ent in children_ents: + _ftrack_id = children_ent["data"]["ftrackId"] + if _ftrack_id in entities_dict: + continue + + entities_dict[_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + entities_dict[_ftrack_id]["parent_id"] = ftrack_id + if _ftrack_id not in entities_dict[ftrack_id]["children"]: + entities_dict[ftrack_id]["children"].append(_ftrack_id) + children_queue.put(children_ent) + + while not parent_queue.empty(): + avalon_ent = parent_queue.get() + if avalon_ent["type"].lower() == "project": + 
continue + + ftrack_id = avalon_ent["data"]["ftrackId"] + + vis_par = avalon_ent["data"]["visualParent"] + if vis_par is None: + vis_par = avalon_ent["parent"] + + parent_ent = self.avalon_ents_by_id[vis_par] + parent_ftrack_id = parent_ent["data"]["ftrackId"] + if parent_ftrack_id not in entities_dict: + entities_dict[parent_ftrack_id] = { + "children": [], + "parent_id": None, + "hier_attrs": {} + } + + if ftrack_id not in entities_dict[parent_ftrack_id]["children"]: + entities_dict[parent_ftrack_id]["children"].append(ftrack_id) + + entities_dict[ftrack_id]["parent_id"] = parent_ftrack_id + + if parent_ftrack_id in cust_attrs_ftrack_ids: + continue + mongo_ftrack_mapping[vis_par] = parent_ftrack_id + cust_attrs_ftrack_ids.append(parent_ftrack_id) + # if ftrack_id not in ftrack_parenting[parent_ftrack_id]: + # ftrack_parenting[parent_ftrack_id].append(ftrack_id) + + parent_queue.put(parent_ent) + + # Prepare values to query + entity_ids_joined = ", ".join([ + "\"{}\"".format(id) for id in cust_attrs_ftrack_ids + ]) + attributes_joined = ", ".join([ + "\"{}\"".format(name) for name in hier_cust_attrs_keys + ]) + [values] = self.process_session._call([{ + "action": "query", + "expression": ( + "select value, entity_id from CustomAttributeValue " + "where entity_id in ({}) and configuration.key in ({})" + ).format(entity_ids_joined, attributes_joined) + }]) + ftrack_project_id = self.cur_project["id"] + + for attr in hier_attrs: + key = attr["key"] + if key not in hier_cust_attrs_keys: + continue + entities_dict[ftrack_project_id]["hier_attrs"][key] = ( + attr["default"] + ) + + # PREPARE DATA BEFORE THIS + avalon_hier = [] + for value in values["data"]: + if value["value"] is None: + continue + entity_id = value["entity_id"] + key = value["configuration"]["key"] + entities_dict[entity_id]["hier_attrs"][key] = value["value"] + + # Get dictionary with not None hierarchical values to pull to childs + project_values = {} + for key, value in ( + entities_dict[ftrack_project_id]["hier_attrs"].items() + ): + if value is not None: + project_values[key] = value + + for key in avalon_hier: + value = entities_dict[ftrack_project_id]["avalon_attrs"][key] + if value is not None: + project_values[key] = value + + hier_down_queue = queue.Queue() + hier_down_queue.put((project_values, ftrack_project_id)) + + while not hier_down_queue.empty(): + hier_values, parent_id = hier_down_queue.get() + for child_id in entities_dict[parent_id]["children"]: + _hier_values = hier_values.copy() + for name in hier_cust_attrs_keys: + value = entities_dict[child_id]["hier_attrs"].get(name) + if value is not None: + _hier_values[name] = value + + entities_dict[child_id]["hier_attrs"].update(_hier_values) + hier_down_queue.put((_hier_values, child_id)) + + ftrack_mongo_mapping = {} + for mongo_id, ftrack_id in mongo_ftrack_mapping.items(): + ftrack_mongo_mapping[ftrack_id] = mongo_id + + for ftrack_id, data in entities_dict.items(): + mongo_id = ftrack_mongo_mapping[ftrack_id] + avalon_ent = self.avalon_ents_by_id[mongo_id] + for key, value in data["hier_attrs"].items(): + if ( + key in avalon_ent["data"] and + avalon_ent["data"][key] == value + ): + continue + + if "data" not in self.updates[mongo_id]: + self.updates[mongo_id]["data"] = {} + + self.updates[mongo_id]["data"][key] = value + + self.update_entities() + + def update_entities(self): + mongo_changes_bulk = [] + for mongo_id, changes in self.updates.items(): + filter = {"_id": mongo_id} + change_data = avalon_sync.from_dict_to_set(changes) + 
mongo_changes_bulk.append(UpdateOne(filter, change_data)) + + if not mongo_changes_bulk: + # TODO logging + return + self.dbcon.bulk_write(mongo_changes_bulk) def register(session, plugins_presets): '''Register plugin. Called when used as an plugin.''' SyncToAvalonEvent(session, plugins_presets).register() - - -removed_example = { - 'action': 'remove', - 'changes': { - 'bid': {'new': None, 'old': 0.0}, - 'context_type': {'new': None, 'old': 'task'}, - 'description': {'new': None, 'old': ''}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': None, 'old': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'isopen': {'new': None, 'old': False}, - 'isrequirecomment': {'new': None, 'old': False}, - 'name': {'new': None, 'old': 'compositing'}, - 'object_typeid': {'new': None, 'old': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c'}, - 'parent_id': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'priorityid': {'new': None, 'old': '9661b320-3a0c-11e2-81c1-0800200c9a66'}, - 'showid': {'new': None, 'old': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - 'sort': {'new': None, 'old': 0.0}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': None, 'old': '44dd9fb2-4164-11df-9218-0019bb4983d8'}, - 'taskid': {'new': None, 'old': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'thumbid': {'new': None, 'old': None}, - 'typeid': {'new': None, 'old': '44dd23b6-4164-11df-9218-0019bb4983d8'} - }, - 'entityId': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Task', - 'keys': ['id', 'taskid', 'thumbid', 'context_type', 'name', 'parent_id', 'bid', 'description', 'startdate', 'enddate', 'statusid', 'typeid', 'priorityid', 'isopen', 'isrequirecomment', 'object_typeid', 'showid', 'sort'], - 'objectTypeId': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c', - 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - 'parents': [ - {'entityId': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None}, - {'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'} - ] -} - -add_example = { - 'action': 'add', - 'changes': { - 'bid': {'new': 0.0, 'old': None}, - 'context_type': {'new': 'task', 'old': None}, - 'description': {'new': '', 'old': None}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'old': None}, - 'isopen': {'new': False, 'old': None}, - 'isrequirecomment': {'new': False, 'old': None}, - 'name': {'new': 's001_ep_02_shot_0080', 'old': None}, - 'object_typeid': {'new': 'bad911de-3bd6-47b9-8b46-3476e237cb36', 'old': None}, - 'parent_id': {'new': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'old': None}, - 'priorityid': {'new': '9661b320-3a0c-11e2-81c1-0800200c9a66', 'old': None}, - 'showid': {'new': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'old': None}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': 'a0bbf0b4-15e2-11e1-b21a-0019bb4983d8', 'old': None}, - 'taskid': {'new': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'old': None}, - 'typeid': {'new': None, 'old': None} - }, - 'entityId': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', - 'entityType': 'task', - 
'entity_type': 'Shot', - 'keys': ['startdate', 'showid', 'typeid', 'enddate', 'name', 'isopen', 'parent_id', 'context_type', 'bid', 'priorityid', 'statusid', 'isrequirecomment', 'object_typeid', 'taskid', 'id', 'description' ], - 'objectTypeId': 'bad911de-3bd6-47b9-8b46-3476e237cb36', - 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'parents': [ - {'entityId': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6' }, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None} - ] -} - -event_data = { - '_data': { - 'data': { - 'clientToken': 'dfd96508-06e6-11ea-94d1-3e41ec9bc0d6', - 'entities': [{ - 'action': 'update', - 'changes': {'name': {'new': 's001_ep_02_shot_00101', 'old': 's001_ep_02_shot_0010'}}, - 'entityId': 'f9388f70-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Shot', - 'keys': ['name'], - 'objectTypeId': 'bad911de-3bd6-47b9-8b46-3476e237cb36', - 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'parents': [{ - 'entityId': 'f9388f70-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6' - }, { - 'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6' - }, { - 'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6' - }, { - 'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', - 'entityType': 'show', - 'parentId': None - }] - }, { - 'action': 'move', - 'changes': { - 'parent_id': { - 'new': 'f911b742-fafb-11e9-9faa-3e41ec9bc0d6', - 'old': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6' - } - }, - 'entityId': '69dd9f1c-0638-11ea-97bc-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Shot', - 'keys': ['parent_id'], - 'objectTypeId': 'bad911de-3bd6-47b9-8b46-3476e237cb36', - 'parentId': 'f911b742-fafb-11e9-9faa-3e41ec9bc0d6', - 'parents': [{ - 'entityId': '69dd9f1c-0638-11ea-97bc-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': 'f911b742-fafb-11e9-9faa-3e41ec9bc0d6' - }, { - 'entityId': 'f911b742-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': 'f8fd8b82-fafb-11e9-9faa-3e41ec9bc0d6' - }, { - 'entityId': 'f8fd8b82-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6' - }, { - 'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', - 'entityType': 'show', - 'parentId': None - }] - }, { - 'action': 'add', - 'changes': { - 'bid': {'new': 0.0, 'old': None}, - 'context_type': {'new': 'task', 'old': None}, - 'description': {'new': '', 'old': None}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'old': None}, - 'isopen': {'new': False, 'old': None}, - 'isrequirecomment': {'new': False, 'old': None}, - 'name': {'new': 's001_ep_02_shot_0080', 'old': None}, - 'object_typeid': {'new': 'bad911de-3bd6-47b9-8b46-3476e237cb36', 'old': None}, - 'parent_id': {'new': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'old': None}, - 'priorityid': {'new': '9661b320-3a0c-11e2-81c1-0800200c9a66', 'old': None}, - 'showid': {'new': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'old': None}, - 'startdate': {'new': None, 'old': 
None}, - 'statusid': {'new': 'a0bbf0b4-15e2-11e1-b21a-0019bb4983d8', 'old': None}, - 'taskid': {'new': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'old': None}, - 'typeid': {'new': None, 'old': None} - }, - 'entityId': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Shot', - 'keys': ['startdate', 'showid', 'typeid', 'enddate', 'name', 'isopen', 'parent_id', 'context_type', 'bid', 'priorityid', 'statusid', 'isrequirecomment', 'object_typeid', 'taskid', 'id', 'description' ], - 'objectTypeId': 'bad911de-3bd6-47b9-8b46-3476e237cb36', - 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'parents': [ - {'entityId': '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6' }, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None} - ] - }, { - 'action': 'remove', - 'changes': { - 'bid': {'new': None, 'old': 0.0}, - 'context_type': {'new': None, 'old': 'task'}, - 'description': {'new': None, 'old': ''}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': None, 'old': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'isopen': {'new': None, 'old': False}, - 'isrequirecomment': {'new': None, 'old': False}, - 'name': {'new': None, 'old': 'compositing'}, - 'object_typeid': {'new': None, 'old': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c'}, - 'parent_id': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'priorityid': {'new': None, 'old': '9661b320-3a0c-11e2-81c1-0800200c9a66'}, - 'showid': {'new': None, 'old': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - 'sort': {'new': None, 'old': 0.0}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': None, 'old': '44dd9fb2-4164-11df-9218-0019bb4983d8'}, - 'taskid': {'new': None, 'old': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'thumbid': {'new': None, 'old': None}, - 'typeid': {'new': None, 'old': '44dd23b6-4164-11df-9218-0019bb4983d8'} - }, - 'entityId': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Task', - 'keys': ['id', 'taskid', 'thumbid', 'context_type', 'name', 'parent_id', 'bid', 'description', 'startdate', 'enddate', 'statusid', 'typeid', 'priorityid', 'isopen', 'isrequirecomment', 'object_typeid', 'showid', 'sort'], - 'objectTypeId': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c', - 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - 'parents': [ - {'entityId': 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None}, - {'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'} - ] - }, { - 'action': 'remove', - 'changes': { - 'bid': {'new': None, 'old': 0.0}, - 'context_type': {'new': None, 'old': 'task'}, - 'description': {'new': None, 'old': ''}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': None, 'old': 
'dd62a91a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'isopen': {'new': None, 'old': False}, - 'isrequirecomment': {'new': None, 'old': False}, - 'name': {'new': None, 'old': 'modeling'}, - 'object_typeid': {'new': None, 'old': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c'}, - 'parent_id': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'priorityid': {'new': None, 'old': '9661b320-3a0c-11e2-81c1-0800200c9a66'}, - 'showid': {'new': None, 'old': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - 'sort': {'new': None, 'old': 0.0}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': None, 'old': '44dd9fb2-4164-11df-9218-0019bb4983d8'}, - 'taskid': {'new': None, 'old': 'dd62a91a-06e6-11ea-a504-3e41ec9bc0d6'}, - 'thumbid': {'new': None, 'old': None}, - 'typeid': {'new': None, 'old': '44dc53c8-4164-11df-9218-0019bb4983d8'} - }, - 'entityId': 'dd62a91a-06e6-11ea-a504-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Task', - 'keys': ['id', 'taskid', 'thumbid', 'context_type', 'name', 'parent_id', 'bid', 'description', 'startdate', 'enddate', 'statusid', 'typeid', 'priorityid', 'isopen', 'isrequirecomment', 'object_typeid', 'showid', 'sort'], - 'objectTypeId': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c', - 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - 'parents': [ - {'entityId': 'dd62a91a-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None}, - {'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'} - ] - }, { - 'action': 'remove', - 'changes': { - 'bid': {'new': None, 'old': 0.0}, - 'context_type': {'new': None, 'old': 'task'}, - 'description': {'new': None, 'old': ''}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': None, 'old': 'dd6dc9e4-06e6-11ea-a504-3e41ec9bc0d6'}, - 'isopen': {'new': None, 'old': False}, - 'isrequirecomment': {'new': None, 'old': False}, - 'name': {'new': None, 'old': 'texture'}, - 'object_typeid': {'new': None, 'old': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c'}, - 'parent_id': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'priorityid': {'new': None, 'old': '9661b320-3a0c-11e2-81c1-0800200c9a66'}, - 'showid': {'new': None, 'old': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - 'sort': {'new': None, 'old': 0.0}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': None, 'old': '44dd9fb2-4164-11df-9218-0019bb4983d8'}, - 'taskid': {'new': None, 'old': 'dd6dc9e4-06e6-11ea-a504-3e41ec9bc0d6'}, - 'thumbid': {'new': None, 'old': None}, - 'typeid': {'new': None, 'old': 'c09dfaa8-ac6d-11e7-bd00-0a580aa00e67'} - }, - 'entityId': 'dd6dc9e4-06e6-11ea-a504-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Task', - 'keys': ['id', 'taskid', 'thumbid', 'context_type', 'name', 'parent_id', 'bid', 'description', 'startdate', 'enddate', 'statusid', 'typeid', 'priorityid', 'isopen', 'isrequirecomment', 'object_typeid', 'showid', 'sort'], - 'objectTypeId': '11c137c0-ee7e-4f9c-91c5-8c77cec22b2c', - 'parentId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - 'parents': [ - {'entityId': 'dd6dc9e4-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 
'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None}, - {'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'} - ] - }, { - 'action': 'remove', - 'changes': { - 'bid': {'new': None, 'old': 0.0}, - 'context_type': {'new': None, 'old': 'task'}, - 'description': {'new': None, 'old': ''}, - 'enddate': {'new': None, 'old': None}, - 'id': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'isopen': {'new': None, 'old': False}, - 'isrequirecomment': {'new': None, 'old': False}, - 'name': {'new': None, 'old': 's001_ep_02_shot_0070'}, - 'object_typeid': {'new': None, 'old': 'bad911de-3bd6-47b9-8b46-3476e237cb36'}, - 'parent_id': {'new': None, 'old': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - 'priorityid': {'new': None, 'old': '9661b320-3a0c-11e2-81c1-0800200c9a66'}, - 'showid': {'new': None, 'old': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - 'sort': {'new': None, 'old': 0.0}, - 'startdate': {'new': None, 'old': None}, - 'statusid': {'new': None, 'old': 'a0bbf0b4-15e2-11e1-b21a-0019bb4983d8'}, - 'taskid': {'new': None, 'old': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6'}, - 'thumbid': {'new': None, 'old': None}, - 'typeid': {'new': None, 'old': None} - }, - 'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Shot', - 'keys': ['id', 'taskid', 'thumbid', 'context_type', 'name', 'parent_id', 'bid', 'description', 'startdate', 'enddate', 'statusid', 'typeid', 'priorityid', 'isopen', 'isrequirecomment', 'object_typeid', 'showid', 'sort'], - 'objectTypeId': 'bad911de-3bd6-47b9-8b46-3476e237cb36', - 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'parents': [ - {'entityId': 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', 'entityType': 'task', 'parentId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6'}, - {'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6'}, - {'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', 'entityType': 'task', 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6'}, - {'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', 'entityType': 'show', 'parentId': None } - ] - }], - 'parents': [ - 'dd62a91a-06e6-11ea-a504-3e41ec9bc0d6', - 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - '69dd9f1c-0638-11ea-97bc-3e41ec9bc0d6', - 'f911b742-fafb-11e9-9faa-3e41ec9bc0d6', - 'f8fd8b82-fafb-11e9-9faa-3e41ec9bc0d6', - '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', - 'dd6dc9e4-06e6-11ea-a504-3e41ec9bc0d6', - 'dd562c80-06e6-11ea-a504-3e41ec9bc0d6', - '872e0a9c-06e8-11ea-b67a-3e41ec9bc0d6', - '83f56510-0638-11ea-9d25-3e41ec9bc0d6', - 'dd784a4a-06e6-11ea-a504-3e41ec9bc0d6', - 'f9388f70-fafb-11e9-9faa-3e41ec9bc0d6' - ], - 'pushToken': 'e0bf693606e611ea8f823e41ec9bc0d6', - 'user': { - 'name': 'Kuba Trllo', - 'userid': '2a8ae090-cbd3-11e8-a87a-0a580aa00121' - } - }, - 'id': '978b06c4351a4dca80f3fefb75f0ac62', - 'in_reply_to_event': None, - 'sent': None, - 'source': { - 'applicationId': 'ftrack.client.web', - 'id': 'dfd96508-06e6-11ea-94d1-3e41ec9bc0d6', - 'user': { - 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121', - 'username': 'jakub.trllo' - } - }, - 'target': '', - 'topic': 'ftrack.update' - }, - '_stopped': 
False -} - - -custom_attr_event = { - '_data': { - 'data': { - 'clientToken': 'dfd96508-06e6-11ea-94d1-3e41ec9bc0d6', - 'entities': [{ - 'action': 'update', - 'changes': { - 'frameEnd': {'new': '1010', 'old': ''} - }, - 'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'entity_type': 'Episode', - 'keys': ['frameEnd'], - 'objectTypeId': '22139355-61da-4c8f-9db4-3abc870166bc', - 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', - 'parents': [{ - 'entityId': 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6' - }, { - 'entityId': '83f56510-0638-11ea-9d25-3e41ec9bc0d6', - 'entityType': 'task', - 'parentId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6' - }, { - 'entityId': '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6', - 'entityType': 'show', - 'parentId': None - }] - }], - 'parents': [ - 'f935a8b4-fafb-11e9-9faa-3e41ec9bc0d6', - '83f56510-0638-11ea-9d25-3e41ec9bc0d6', - '24b76ed8-fafb-11e9-9a47-3e41ec9bc0d6' - ], - 'pushToken': 'e0bfec8a-06e6-11ea-8f82-3e41ec9bc0d6', - 'user': { - 'name': 'Kuba Trllo', - 'userid': '2a8ae090-cbd3-11e8-a87a-0a580aa00121' - } - }, - 'id': 'c21ef98e3f634c20ae18509052662345', - 'in_reply_to_event': None, - 'sent': None, - 'source': { - 'applicationId': 'ftrack.client.web', - 'id': 'dfd96508-06e6-11ea-94d1-3e41ec9bc0d6', - 'user': { - 'id': '2a8ae090-cbd3-11e8-a87a-0a580aa00121', - 'username': 'jakub.trllo' - } - }, - 'target': '', - 'topic': 'ftrack.update' - }, - '_stopped': False -}
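
For reference, a minimal, self-contained sketch of how an update payload shaped like `event_data` above can be grouped by action before processing. It only mirrors the bucketing done at the start of `launch()`; the helper name `bucket_entities_by_action` is hypothetical, and the real handler additionally filters entity infos, splits name-only changes into the renamed group, and checks the auto-sync custom attribute.

def bucket_entities_by_action(event):
    """Group entity infos from an ftrack update event by their action."""
    entities_by_action = {
        "remove": {},
        "update": {},
        "move": {},
        "add": {}
    }
    for ent_info in event["data"]["entities"]:
        action = ent_info.get("action")
        if action not in entities_by_action:
            continue
        entities_by_action[action][ent_info["entityId"]] = ent_info
    return entities_by_action

# Usage with the example payload above:
# buckets = bucket_entities_by_action(event_data["_data"])
# -> the parent_id-only change lands in buckets["move"], the name change
#    in buckets["update"], the new Shot in buckets["add"] and the removed
#    Task/Shot entities in buckets["remove"].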