diff --git a/pype/ftrack/__init__.py b/pype/ftrack/__init__.py
index 45ca8384b5..aa8f04bffb 100644
--- a/pype/ftrack/__init__.py
+++ b/pype/ftrack/__init__.py
@@ -1,2 +1,2 @@
from .lib import *
-from .ftrack_server import FtrackServer
+from .ftrack_server import FtrackServer, check_ftrack_url
diff --git a/pype/ftrack/actions/action_delete_asset.py b/pype/ftrack/actions/action_delete_asset.py
index 106c81758a..fb87e3ed7c 100644
--- a/pype/ftrack/actions/action_delete_asset.py
+++ b/pype/ftrack/actions/action_delete_asset.py
@@ -277,10 +277,7 @@ class DeleteAsset(BaseAction):
'message': 'No entities to delete in avalon'
}
- or_subquery = []
- for id in all_ids:
- or_subquery.append({'_id': id})
- delete_query = {'$or': or_subquery}
+ delete_query = {'_id': {'$in': all_ids}}
self.db.delete_many(delete_query)
return {
diff --git a/pype/ftrack/actions/action_delete_asset_byname.py b/pype/ftrack/actions/action_delete_asset_byname.py
index 4a3807f8f0..66350d5480 100644
--- a/pype/ftrack/actions/action_delete_asset_byname.py
+++ b/pype/ftrack/actions/action_delete_asset_byname.py
@@ -97,10 +97,7 @@ class AssetsRemover(BaseAction):
'message': 'None of assets'
}
- or_subquery = []
- for id in all_ids:
- or_subquery.append({'_id': id})
- delete_query = {'$or': or_subquery}
+ delete_query = {'_id': {'$in': all_ids}}
self.db.delete_many(delete_query)
self.db.uninstall()
diff --git a/pype/ftrack/actions/action_job_killer.py b/pype/ftrack/actions/action_job_killer.py
index 56a280a663..c7839f5508 100644
--- a/pype/ftrack/actions/action_job_killer.py
+++ b/pype/ftrack/actions/action_job_killer.py
@@ -108,6 +108,7 @@ class JobKiller(BaseAction):
'Changing Job ({}) status: {} -> failed'
).format(job['id'], origin_status))
except Exception:
+ session.rollback()
self.log.warning((
'Changing Job ({}) has failed'
).format(job['id']))
diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py
deleted file mode 100644
index 289abd0122..0000000000
--- a/pype/ftrack/actions/action_sync_hier_attrs_local.py
+++ /dev/null
@@ -1,351 +0,0 @@
-import os
-import sys
-import json
-import argparse
-import logging
-import collections
-
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-
-
-class SyncHierarchicalAttrs(BaseAction):
-
- db_con = DbConnector()
- ca_mongoid = lib.get_ca_mongoid()
-
- #: Action identifier.
- identifier = 'sync.hierarchical.attrs.local'
- #: Action label.
- label = "Pype Admin"
- variant = '- Sync Hier Attrs (Local)'
- #: Action description.
- description = 'Synchronize hierarchical attributes'
- #: Icon
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
- )
-
- #: roles that are allowed to register this action
- role_list = ['Pypeclub', 'Administrator', 'Project Manager']
-
- def discover(self, session, entities, event):
- ''' Validation '''
- for entity in entities:
- if (
- entity.get('context_type', '').lower() in ('show', 'task') and
- entity.entity_type.lower() != 'task'
- ):
- return True
- return False
-
- def launch(self, session, entities, event):
- self.interface_messages = {}
- user = session.query(
- 'User where id is "{}"'.format(event['source']['user']['id'])
- ).one()
-
- job = session.create('Job', {
- 'user': user,
- 'status': 'running',
- 'data': json.dumps({
- 'description': 'Sync Hierachical attributes'
- })
- })
- session.commit()
- self.log.debug('Job with id "{}" created'.format(job['id']))
-
- process_session = ftrack_api.Session(
- server_url=session.server_url,
- api_key=session.api_key,
- api_user=session.api_user,
- auto_connect_event_hub=True
- )
-
- try:
- # Collect hierarchical attrs
- self.log.debug('Collecting Hierarchical custom attributes started')
- custom_attributes = {}
- all_avalon_attr = process_session.query(
- 'CustomAttributeGroup where name is "avalon"'
- ).one()
-
- error_key = (
- 'Hierarchical attributes with set "default" value (not allowed)'
- )
-
- for cust_attr in all_avalon_attr['custom_attribute_configurations']:
- if 'avalon_' in cust_attr['key']:
- continue
-
- if not cust_attr['is_hierarchical']:
- continue
-
- if cust_attr['default']:
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
- self.interface_messages[error_key].append(
- cust_attr['label']
- )
-
- self.log.warning((
- 'Custom attribute "{}" has set default value.'
- ' This attribute can\'t be synchronized'
- ).format(cust_attr['label']))
- continue
-
- custom_attributes[cust_attr['key']] = cust_attr
-
- self.log.debug(
- 'Collecting Hierarchical custom attributes has finished'
- )
-
- if not custom_attributes:
- msg = 'No hierarchical attributes to sync.'
- self.log.debug(msg)
- return {
- 'success': True,
- 'message': msg
- }
-
- entity = entities[0]
- if entity.entity_type.lower() == 'project':
- project_name = entity['full_name']
- else:
- project_name = entity['project']['full_name']
-
- self.db_con.install()
- self.db_con.Session['AVALON_PROJECT'] = project_name
-
- _entities = self._get_entities(event, process_session)
-
- for entity in _entities:
- self.log.debug(30*'-')
- self.log.debug(
- 'Processing entity "{}"'.format(entity.get('name', entity))
- )
-
- ent_name = entity.get('name', entity)
- if entity.entity_type.lower() == 'project':
- ent_name = entity['full_name']
-
- for key in custom_attributes:
- self.log.debug(30*'*')
- self.log.debug(
- 'Processing Custom attribute key "{}"'.format(key)
- )
- # check if entity has that attribute
- if key not in entity['custom_attributes']:
- error_key = 'Missing key on entities'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- self.interface_messages[error_key].append(
- '- key: "{}" - entity: "{}"'.format(key, ent_name)
- )
-
- self.log.error((
- '- key "{}" not found on "{}"'
- ).format(key, ent_name))
- continue
-
- value = self.get_hierarchical_value(key, entity)
- if value is None:
- error_key = (
- 'Missing value for key on entity'
- ' and its parents (synchronization was skipped)'
- )
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- self.interface_messages[error_key].append(
- '- key: "{}" - entity: "{}"'.format(key, ent_name)
- )
-
- self.log.warning((
- '- key "{}" not set on "{}" or its parents'
- ).format(key, ent_name))
- continue
-
- self.update_hierarchical_attribute(entity, key, value)
-
- job['status'] = 'done'
- session.commit()
-
- except Exception:
- self.log.error(
- 'Action "{}" failed'.format(self.label),
- exc_info=True
- )
-
- finally:
- self.db_con.uninstall()
-
- if job['status'] in ('queued', 'running'):
- job['status'] = 'failed'
- session.commit()
- if self.interface_messages:
- title = "Errors during SyncHierarchicalAttrs"
- self.show_interface_from_dict(
- messages=self.interface_messages, title=title, event=event
- )
-
- return True
-
- def get_hierarchical_value(self, key, entity):
- value = entity['custom_attributes'][key]
- if (
- value is not None or
- entity.entity_type.lower() == 'project'
- ):
- return value
-
- return self.get_hierarchical_value(key, entity['parent'])
-
- def update_hierarchical_attribute(self, entity, key, value):
- if (
- entity['context_type'].lower() not in ('show', 'task') or
- entity.entity_type.lower() == 'task'
- ):
- return
-
- ent_name = entity.get('name', entity)
- if entity.entity_type.lower() == 'project':
- ent_name = entity['full_name']
-
- hierarchy = '/'.join(
- [a['name'] for a in entity.get('ancestors', [])]
- )
- if hierarchy:
- hierarchy = '/'.join(
- [entity['project']['full_name'], hierarchy, entity['name']]
- )
- elif entity.entity_type.lower() == 'project':
- hierarchy = entity['full_name']
- else:
- hierarchy = '/'.join(
- [entity['project']['full_name'], entity['name']]
- )
-
- self.log.debug('- updating entity "{}"'.format(hierarchy))
-
- # collect entity's custom attributes
- custom_attributes = entity.get('custom_attributes')
- if not custom_attributes:
- return
-
- mongoid = custom_attributes.get(self.ca_mongoid)
- if not mongoid:
- error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" is not synchronized to avalon. Skipping'.format(
- ent_name
- )
- )
- return
-
- try:
- mongoid = ObjectId(mongoid)
- except Exception:
- error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" has stored invalid MongoID. Skipping'.format(
- ent_name
- )
- )
- return
- # Find entity in Mongo DB
- mongo_entity = self.db_con.find_one({'_id': mongoid})
- if not mongo_entity:
- error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" was not found in DB by id "{}". Skipping'.format(
- ent_name, str(mongoid)
- )
- )
- return
-
- # Change value if entity has set it's own
- entity_value = custom_attributes[key]
- if entity_value is not None:
- value = entity_value
-
- data = mongo_entity.get('data') or {}
-
- data[key] = value
- self.db_con.update_many(
- {'_id': mongoid},
- {'$set': {'data': data}}
- )
-
- self.log.debug(
- '-- stored value "{}"'.format(value)
- )
-
- for child in entity.get('children', []):
- self.update_hierarchical_attribute(child, key, value)
-
-
-def register(session, plugins_presets={}):
- '''Register plugin. Called when used as an plugin.'''
-
- SyncHierarchicalAttrs(session, plugins_presets).register()
-
-
-def main(arguments=None):
- '''Set up logging and register action.'''
- if arguments is None:
- arguments = []
-
- parser = argparse.ArgumentParser()
- # Allow setting of logging level from arguments.
- loggingLevels = {}
- for level in (
- logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
- logging.ERROR, logging.CRITICAL
- ):
- loggingLevels[logging.getLevelName(level).lower()] = level
-
- parser.add_argument(
- '-v', '--verbosity',
- help='Set the logging output verbosity.',
- choices=loggingLevels.keys(),
- default='info'
- )
- namespace = parser.parse_args(arguments)
-
- # Set up basic logging
- logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
- session = ftrack_api.Session()
- register(session)
-
- # Wait for events
- logging.info(
- 'Registered actions and listening for events. Use Ctrl-C to abort.'
- )
- session.event_hub.wait()
-
-
-if __name__ == '__main__':
- raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/actions/action_sync_to_avalon.py b/pype/ftrack/actions/action_sync_to_avalon.py
new file mode 100644
index 0000000000..12d84ce0ca
--- /dev/null
+++ b/pype/ftrack/actions/action_sync_to_avalon.py
@@ -0,0 +1,2327 @@
+import os
+import collections
+import re
+import queue
+import time
+import toml
+import traceback
+
+from bson.objectid import ObjectId
+from bson.errors import InvalidId
+from pymongo import UpdateOne
+
+import avalon
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.io_nonsingleton import DbConnector
+from pype.vendor import ftrack_api
+from pype.vendor.ftrack_api import session as fa_session
+from pypeapp import Anatomy
+
+
+class SyncEntitiesFactory:
+ dbcon = DbConnector()
+
+ project_query = (
+ "select full_name, name, custom_attributes"
+ ", project_schema._task_type_schema.types.name"
+ " from Project where full_name is \"{}\""
+ )
+ entities_query = (
+ "select id, name, parent_id, link"
+ " from TypedContext where project_id is \"{}\""
+ )
+ ignore_custom_attr_key = "avalon_ignore_sync"
+ id_cust_attr = "avalon_mongo_id"
+
+ entity_schemas = {
+ "project": "avalon-core:project-2.0",
+ "asset": "avalon-core:asset-3.0",
+ "config": "avalon-core:config-1.0"
+ }
+
+ report_splitter = {"type": "label", "value": "---"}
+
+ def __init__(self, log_obj, _session, project_full_name):
+ self.log = log_obj
+ self.session = ftrack_api.Session(
+ server_url=_session.server_url,
+ api_key=_session.api_key,
+ api_user=_session.api_user,
+ auto_connect_event_hub=True
+ )
+
+ self.cancel_auto_sync = False
+
+ self.schema_patterns = {}
+ self.duplicates = {}
+ self.failed_regex = {}
+ self.tasks_failed_regex = collections.defaultdict(list)
+ self.report_items = {
+ "info": collections.defaultdict(list),
+ "warning": collections.defaultdict(list),
+ "error": collections.defaultdict(list)
+ }
+
+ self.create_list = []
+ self.recreated_ftrack_ents = {}
+ self.updates = collections.defaultdict(dict)
+
+ self._avalon_ents_by_id = None
+ self._avalon_ents_by_ftrack_id = None
+ self._avalon_ents_by_name = None
+ self._avalon_ents_by_parent_id = None
+
+ self._avalon_archived_ents = None
+ self._avalon_archived_by_id = None
+ self._avalon_archived_by_parent_id = None
+ self._avalon_archived_by_name = None
+
+ self._subsets_by_parent_id = None
+ self._changeability_by_mongo_id = None
+
+ self.all_filtered_entities = {}
+ # self.all_filtered_ids = []
+ self.filtered_ids = []
+ self.not_selected_ids = []
+
+ self._ent_pats_by_ftrack_id = {}
+
+ # Get Ftrack project
+ ft_project = self.session.query(
+ self.project_query.format(project_full_name)
+ ).one()
+ ft_project_id = ft_project["id"]
+
+ # Skip if project is ignored
+ if ft_project["custom_attributes"].get(
+ self.ignore_custom_attr_key
+ ) is True:
+ msg = (
+ "Project \"{}\" has set `Ignore Sync` custom attribute to True"
+ ).format(project_full_name)
+ self.log.warning(msg)
+ return {"success": False, "message": msg}
+
+        # Check if the `avalon_mongo_id` custom attribute exists and is accessible
+ if self.id_cust_attr not in ft_project["custom_attributes"]:
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# Can't access Custom attribute <{}>".format(
+ self.id_cust_attr
+ )
+ })
+ items.append({
+ "type": "label",
+ "value": (
+                "\n- Check if user \"{}\" has permissions"
+                " to access the Custom attribute\n"
+            ).format(_session.api_user)
+ })
+ items.append({
+ "type": "label",
+ "value": "- Check if the Custom attribute exist
"
+ })
+ return {
+ "items": items,
+ "title": "Synchronization failed",
+ "success": False,
+ "message": "Synchronization failed"
+ }
+
+ # Find all entities in project
+ all_project_entities = self.session.query(
+ self.entities_query.format(ft_project_id)
+ ).all()
+
+ # Store entities by `id` and `parent_id`
+ entities_dict = collections.defaultdict(lambda: {
+ "children": list(),
+ "parent_id": None,
+ "entity": None,
+ "entity_type": None,
+ "name": None,
+ "custom_attributes": {},
+ "hier_attrs": {},
+ "avalon_attrs": {},
+ "tasks": []
+ })
+
+ for entity in all_project_entities:
+ parent_id = entity["parent_id"]
+ entity_type = entity.entity_type
+ entity_type_low = entity_type.lower()
+ if entity_type_low == "task":
+ entities_dict[parent_id]["tasks"].append(entity["name"])
+ continue
+
+ entity_id = entity["id"]
+ entities_dict[entity_id].update({
+ "entity": entity,
+ "parent_id": parent_id,
+ "entity_type": entity_type_low,
+ "entity_type_orig": entity_type,
+ "name": entity["name"]
+ })
+ entities_dict[parent_id]["children"].append(entity_id)
+
+ entities_dict[ft_project_id]["entity"] = ft_project
+ entities_dict[ft_project_id]["entity_type"] = (
+ ft_project.entity_type.lower()
+ )
+ entities_dict[ft_project_id]["entity_type_orig"] = (
+ ft_project.entity_type
+ )
+ entities_dict[ft_project_id]["name"] = ft_project["full_name"]
+
+ self.ft_project_id = ft_project_id
+ self.entities_dict = entities_dict
+
+ @property
+ def avalon_ents_by_id(self):
+ if self._avalon_ents_by_id is None:
+ self._avalon_ents_by_id = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_id[str(entity["_id"])] = entity
+
+ return self._avalon_ents_by_id
+
+ @property
+ def avalon_ents_by_ftrack_id(self):
+ if self._avalon_ents_by_ftrack_id is None:
+ self._avalon_ents_by_ftrack_id = {}
+ for entity in self.avalon_entities:
+ key = entity.get("data", {}).get("ftrackId")
+ if not key:
+ continue
+ self._avalon_ents_by_ftrack_id[key] = str(entity["_id"])
+
+ return self._avalon_ents_by_ftrack_id
+
+ @property
+ def avalon_ents_by_name(self):
+ if self._avalon_ents_by_name is None:
+ self._avalon_ents_by_name = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_name[entity["name"]] = str(entity["_id"])
+
+ return self._avalon_ents_by_name
+
+ @property
+ def avalon_ents_by_parent_id(self):
+ if self._avalon_ents_by_parent_id is None:
+ self._avalon_ents_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_entities:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_ents_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_ents_by_parent_id
+
+ @property
+ def avalon_archived_ents(self):
+ if self._avalon_archived_ents is None:
+ self._avalon_archived_ents = [
+ ent for ent in self.dbcon.find({"type": "archived_asset"})
+ ]
+ return self._avalon_archived_ents
+
+ @property
+ def avalon_archived_by_name(self):
+ if self._avalon_archived_by_name is None:
+ self._avalon_archived_by_name = collections.defaultdict(list)
+ for ent in self.avalon_archived_ents:
+ self._avalon_archived_by_name[ent["name"]].append(ent)
+ return self._avalon_archived_by_name
+
+ @property
+ def avalon_archived_by_id(self):
+ if self._avalon_archived_by_id is None:
+ self._avalon_archived_by_id = {
+ str(ent["_id"]): ent for ent in self.avalon_archived_ents
+ }
+ return self._avalon_archived_by_id
+
+ @property
+ def avalon_archived_by_parent_id(self):
+ if self._avalon_archived_by_parent_id is None:
+ self._avalon_archived_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_archived_ents:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_archived_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_archived_by_parent_id
+
+ @property
+ def subsets_by_parent_id(self):
+ if self._subsets_by_parent_id is None:
+ self._subsets_by_parent_id = collections.defaultdict(list)
+ for subset in self.dbcon.find({"type": "subset"}):
+ self._subsets_by_parent_id[str(subset["parent"])].append(
+ subset
+ )
+
+ return self._subsets_by_parent_id
+
+ @property
+ def changeability_by_mongo_id(self):
+ if self._changeability_by_mongo_id is None:
+ self._changeability_by_mongo_id = collections.defaultdict(
+ lambda: True
+ )
+ self._changeability_by_mongo_id[self.avalon_project_id] = False
+ self._bubble_changeability(list(self.subsets_by_parent_id.keys()))
+ return self._changeability_by_mongo_id
+
+ @property
+ def all_ftrack_names(self):
+ return [
+ ent_dict["name"] for ent_dict in self.entities_dict.values() if (
+ ent_dict.get("name")
+ )
+ ]
+
+ def duplicity_regex_check(self):
+ self.log.debug("* Checking duplicities and invalid symbols")
+ # Duplicity and regex check
+ entity_ids_by_name = {}
+ duplicates = []
+ failed_regex = []
+ task_names = {}
+ for ftrack_id, entity_dict in self.entities_dict.items():
+ regex_check = True
+ name = entity_dict["name"]
+ entity_type = entity_dict["entity_type"]
+ # Tasks must be checked too
+ for task_name in entity_dict["tasks"]:
+ passed = task_names.get(task_name)
+ if passed is None:
+ passed = self.check_regex(task_name, "task")
+ task_names[task_name] = passed
+
+ if not passed:
+ self.tasks_failed_regex[task_name].append(ftrack_id)
+
+ if name in entity_ids_by_name:
+ duplicates.append(name)
+ else:
+ entity_ids_by_name[name] = []
+ regex_check = self.check_regex(name, entity_type)
+
+ entity_ids_by_name[name].append(ftrack_id)
+ if not regex_check:
+ failed_regex.append(name)
+
+ for name in failed_regex:
+ self.failed_regex[name] = entity_ids_by_name[name]
+
+ for name in duplicates:
+ self.duplicates[name] = entity_ids_by_name[name]
+
+ self.filter_by_duplicate_regex()
+
+ def check_regex(self, name, entity_type, in_schema=None):
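+        # Validate the name against the pattern from the cached avalon
+        # schema; falls back to a conservative alphanumeric pattern when
+        # the schema does not define one.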
+ schema_name = "asset-3.0"
+ if in_schema:
+ schema_name = in_schema
+ elif entity_type == "project":
+ schema_name = "project-2.0"
+ elif entity_type == "task":
+ schema_name = "task"
+
+ name_pattern = self.schema_patterns.get(schema_name)
+ if not name_pattern:
+ default_pattern = "^[a-zA-Z0-9_.]*$"
+ schema_obj = avalon.schema._cache.get(schema_name + ".json")
+ if not schema_obj:
+ name_pattern = default_pattern
+ else:
+ name_pattern = schema_obj.get(
+ "properties", {}).get(
+ "name", {}).get(
+ "pattern", default_pattern
+ )
+ self.schema_patterns[schema_name] = name_pattern
+
+ if re.match(name_pattern, name):
+ return True
+ return False
+
+ def filter_by_duplicate_regex(self):
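+        # Remove flagged entities breadth-first: an invalid entity takes
+        # its whole subtree with it, since children cannot be synced
+        # without their parent.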
+ filter_queue = queue.Queue()
+ failed_regex_msg = "{} - Entity has invalid symbols in the name"
+ duplicate_msg = "There are multiple entities with the name: \"{}\":"
+
+ for ids in self.failed_regex.values():
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(ent_path))
+ filter_queue.put(id)
+
+ for name, ids in self.duplicates.items():
+ self.log.warning(duplicate_msg.format(name))
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(ent_path)
+ filter_queue.put(id)
+
+ filtered_ids = []
+ while not filter_queue.empty():
+ ftrack_id = filter_queue.get()
+ if ftrack_id in filtered_ids:
+ continue
+
+ entity_dict = self.entities_dict.pop(ftrack_id, {})
+ if not entity_dict:
+ continue
+
+ self.all_filtered_entities[ftrack_id] = entity_dict
+ parent_id = entity_dict.get("parent_id")
+ if parent_id and parent_id in self.entities_dict:
+ if ftrack_id in self.entities_dict[parent_id]["children"]:
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ filtered_ids.append(ftrack_id)
+ for child_id in entity_dict.get("children", []):
+ filter_queue.put(child_id)
+
+ # self.all_filtered_ids.extend(filtered_ids)
+
+ for name, ids in self.tasks_failed_regex.items():
+ for id in ids:
+ if id not in self.entities_dict:
+ continue
+ self.entities_dict[id]["tasks"].remove(name)
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(
+ "/".join([ent_path, name])
+ ))
+
+ def filter_by_ignore_sync(self):
+        # skip filtering if `ignore_sync` attribute does not exist
+ if self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key, "_notset_"
+ ) == "_notset_":
+ return
+
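+        # Walk the tree from the project root; once an entity has the
+        # ignore attribute set, its whole subtree is filtered out as well.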
+ self.filter_queue = queue.Queue()
+ self.filter_queue.put((self.ft_project_id, False))
+ while not self.filter_queue.empty():
+ parent_id, remove = self.filter_queue.get()
+ if remove:
+ parent_dict = self.entities_dict.pop(parent_id, {})
+ self.all_filtered_entities[parent_id] = parent_dict
+ self.filtered_ids.append(parent_id)
+ else:
+ parent_dict = self.entities_dict.get(parent_id, {})
+
+ for child_id in parent_dict.get("children", []):
+                # keep original `remove` value for all children
+ _remove = (remove is True)
+ if not _remove:
+ if self.entities_dict[child_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key
+ ):
+ self.entities_dict[parent_id]["children"].remove(
+ child_id
+ )
+ _remove = True
+ self.filter_queue.put((child_id, _remove))
+
+ # self.all_filtered_ids.extend(self.filtered_ids)
+
+ def filter_by_selection(self, event):
+        # BUGGY!!!! causes entities to end up in the deleted list
+        # TODO may work when filtering happens after preparations
+        # - But this part probably does not have any functional reason
+        # - Time of synchronization probably won't change much
+ selected_ids = []
+ for entity in event["data"]["selection"]:
+ # Skip if project is in selection
+ if entity["entityType"] == "show":
+ return
+ selected_ids.append(entity["entityId"])
+
+ sync_ids = [self.ft_project_id]
+ parents_queue = queue.Queue()
+ children_queue = queue.Queue()
+ for id in selected_ids:
+ # skip if already filtered with ignore sync custom attribute
+ if id in self.filtered_ids:
+ continue
+
+ parents_queue.put(id)
+ children_queue.put(id)
+
+ while not parents_queue.empty():
+ id = parents_queue.get()
+ while True:
+ # Stops when parent is in sync_ids
+ if id in self.filtered_ids or id in sync_ids or id is None:
+ break
+ sync_ids.append(id)
+ id = self.entities_dict[id]["parent_id"]
+
+ while not children_queue.empty():
+ parent_id = children_queue.get()
+ for child_id in self.entities_dict[parent_id]["children"]:
+ if child_id in sync_ids or child_id in self.filtered_ids:
+ continue
+ sync_ids.append(child_id)
+ children_queue.put(child_id)
+
+        # separate not-selected entities from entities to process
+ for key, value in self.entities_dict.items():
+ if key not in sync_ids:
+ self.not_selected_ids.append(key)
+
+ for id in self.not_selected_ids:
+ # pop from entities
+ value = self.entities_dict.pop(id)
+ # remove entity from parent's children
+ parent_id = value["parent_id"]
+ if parent_id not in sync_ids:
+ continue
+
+ self.entities_dict[parent_id]["children"].remove(id)
+
+    def set_custom_attributes(self):
+ self.log.debug("* Preparing custom attributes")
+ # Get custom attributes and values
+ custom_attrs, hier_attrs = self.get_avalon_attr(True)
+ ent_types = self.session.query("select id, name from ObjectType").all()
+ ent_types_by_name = {
+ ent_type["name"]: ent_type["id"] for ent_type in ent_types
+ }
+
+ attrs = set()
+ # store default values per entity type
+ attrs_per_entity_type = collections.defaultdict(dict)
+ avalon_attrs = collections.defaultdict(dict)
+ # store also custom attribute configuration id for future use (create)
+ attrs_per_entity_type_ca_id = collections.defaultdict(dict)
+ avalon_attrs_ca_id = collections.defaultdict(dict)
+
+ for cust_attr in custom_attrs:
+ key = cust_attr["key"]
+ attrs.add(key)
+ ca_ent_type = cust_attr["entity_type"]
+ if key.startswith("avalon_"):
+ if ca_ent_type == "show":
+ avalon_attrs[ca_ent_type][key] = cust_attr["default"]
+ avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
+ else:
+ obj_id = cust_attr["object_type_id"]
+ avalon_attrs[obj_id][key] = cust_attr["default"]
+ avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
+ continue
+
+ if ca_ent_type == "show":
+ attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
+ attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
+ else:
+ obj_id = cust_attr["object_type_id"]
+ attrs_per_entity_type[obj_id][key] = cust_attr["default"]
+ attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]
+
+ obj_id_ent_type_map = {}
+ sync_ids = []
+ for entity_id, entity_dict in self.entities_dict.items():
+ sync_ids.append(entity_id)
+ entity_type = entity_dict["entity_type"]
+ entity_type_orig = entity_dict["entity_type_orig"]
+
+ if entity_type == "project":
+ attr_key = "show"
+ else:
+ map_key = obj_id_ent_type_map.get(entity_type_orig)
+ if not map_key:
+ # Put space between capitals
+ # (e.g. 'AssetBuild' -> 'Asset Build')
+ map_key = re.sub(
+ r"(\w)([A-Z])", r"\1 \2", entity_type_orig
+ )
+ obj_id_ent_type_map[entity_type_orig] = map_key
+
+ # Get object id of entity type
+ attr_key = ent_types_by_name.get(map_key)
+
+                # Backup solution when id is not found in pre-queried objects
+ if not attr_key:
+ query = "ObjectType where name is \"{}\"".format(map_key)
+ attr_key = self.session.query(query).one()["id"]
+ ent_types_by_name[map_key] = attr_key
+
+ prepared_attrs = attrs_per_entity_type.get(attr_key)
+ prepared_avalon_attr = avalon_attrs.get(attr_key)
+ prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key)
+ prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key)
+ if prepared_attrs:
+ self.entities_dict[entity_id]["custom_attributes"] = (
+ prepared_attrs.copy()
+ )
+ if prepared_attrs_ca_id:
+ self.entities_dict[entity_id]["custom_attributes_id"] = (
+ prepared_attrs_ca_id.copy()
+ )
+ if prepared_avalon_attr:
+ self.entities_dict[entity_id]["avalon_attrs"] = (
+ prepared_avalon_attr.copy()
+ )
+ if prepared_avalon_attr_ca_id:
+ self.entities_dict[entity_id]["avalon_attrs_id"] = (
+ prepared_avalon_attr_ca_id.copy()
+ )
+
+ # TODO query custom attributes by entity_id
+ entity_ids_joined = ", ".join([
+ "\"{}\"".format(id) for id in sync_ids
+ ])
+ attributes_joined = ", ".join([
+ "\"{}\"".format(name) for name in attrs
+ ])
+
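+        # Fetch values for all entities and attributes in one low-level
+        # API call instead of querying attribute values per entity.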
+ cust_attr_query = (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ )
+ [values] = self.session._call([{
+ "action": "query",
+ "expression": cust_attr_query.format(
+ entity_ids_joined, attributes_joined
+ )
+ }])
+
+ for value in values["data"]:
+ entity_id = value["entity_id"]
+ key = value["configuration"]["key"]
+ store_key = "custom_attributes"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ self.entities_dict[entity_id][store_key][key] = value["value"]
+
+ # process hierarchical attributes
+ self.set_hierarchical_attribute(hier_attrs, sync_ids)
+
+ def set_hierarchical_attribute(self, hier_attrs, sync_ids):
+ # collect all hierarchical attribute keys
+        # and prepare default values for the project
+ attribute_names = []
+ for attr in hier_attrs:
+ key = attr["key"]
+ attribute_names.append(key)
+
+ store_key = "hier_attrs"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+
+ self.entities_dict[self.ft_project_id][store_key][key] = (
+ attr["default"]
+ )
+
+ # Prepare dict with all hier keys and None values
+ prepare_dict = {}
+ prepare_dict_avalon = {}
+ for attr in attribute_names:
+ if attr.startswith("avalon_"):
+ prepare_dict_avalon[attr] = None
+ else:
+ prepare_dict[attr] = None
+
+ for id, entity_dict in self.entities_dict.items():
+            # Skip project because it already has defaults stored
+ if entity_dict["entity_type"] == "project":
+ continue
+ entity_dict["hier_attrs"] = prepare_dict.copy()
+ for key, val in prepare_dict_avalon.items():
+ entity_dict["avalon_attrs"][key] = val
+
+ # Prepare values to query
+ entity_ids_joined = ", ".join([
+ "\"{}\"".format(id) for id in sync_ids
+ ])
+ attributes_joined = ", ".join([
+ "\"{}\"".format(name) for name in attribute_names
+ ])
+ [values] = self.session._call([{
+ "action": "query",
+ "expression": (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ ).format(entity_ids_joined, attributes_joined)
+ }])
+
+ avalon_hier = []
+ for value in values["data"]:
+ if value["value"] is None:
+ continue
+ entity_id = value["entity_id"]
+ key = value["configuration"]["key"]
+ store_key = "hier_attrs"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ avalon_hier.append(key)
+ self.entities_dict[entity_id][store_key][key] = value["value"]
+
+        # Get dictionary of non-None hierarchical values to push down to children
+ top_id = self.ft_project_id
+ project_values = {}
+ for key, value in self.entities_dict[top_id]["hier_attrs"].items():
+ if value is not None:
+ project_values[key] = value
+
+ for key in avalon_hier:
+ value = self.entities_dict[top_id]["avalon_attrs"][key]
+ if value is not None:
+ project_values[key] = value
+
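+        # Push resolved values down the hierarchy breadth-first: each child
+        # inherits its parent's values and overrides them with its own
+        # non-None attributes.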
+ hier_down_queue = queue.Queue()
+ hier_down_queue.put((project_values, top_id))
+
+ while not hier_down_queue.empty():
+ hier_values, parent_id = hier_down_queue.get()
+ for child_id in self.entities_dict[parent_id]["children"]:
+ _hier_values = hier_values.copy()
+ for name in attribute_names:
+ store_key = "hier_attrs"
+ if name.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ value = self.entities_dict[child_id][store_key][name]
+ if value is not None:
+ _hier_values[name] = value
+
+ self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
+ hier_down_queue.put((_hier_values, child_id))
+
+ def remove_from_archived(self, mongo_id):
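+        # Drop the entity from every cached archived-entity lookup so the
+        # caches stay consistent after unarchiving.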
+ entity = self.avalon_archived_by_id.pop(mongo_id, None)
+ if not entity:
+ return
+
+ if self._avalon_archived_ents is not None:
+ if entity in self._avalon_archived_ents:
+ self._avalon_archived_ents.remove(entity)
+
+ if self._avalon_archived_by_name is not None:
+ name = entity["name"]
+ if name in self._avalon_archived_by_name:
+ name_ents = self._avalon_archived_by_name[name]
+ if entity in name_ents:
+ if len(name_ents) == 1:
+ self._avalon_archived_by_name.pop(name)
+ else:
+ self._avalon_archived_by_name[name].remove(entity)
+
+ # TODO use custom None instead of __NOTSET__
+ if self._avalon_archived_by_parent_id is not None:
+ parent_id = entity.get("data", {}).get(
+ "visualParent", "__NOTSET__"
+ )
+ if parent_id is not None:
+ parent_id = str(parent_id)
+
+ if parent_id in self._avalon_archived_by_parent_id:
+ parent_list = self._avalon_archived_by_parent_id[parent_id]
+                if entity in parent_list:
+ self._avalon_archived_by_parent_id[parent_id].remove(
+ entity
+ )
+
+ def prepare_ftrack_ent_data(self):
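+        # Build avalon-like documents (stored as "final_entity") from the
+        # collected ftrack data; the project also gets its config with
+        # task types and applications.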
+ not_set_ids = []
+ for id, entity_dict in self.entities_dict.items():
+ entity = entity_dict["entity"]
+ if entity is None:
+ not_set_ids.append(id)
+ continue
+
+ self.entities_dict[id]["final_entity"] = {}
+ self.entities_dict[id]["final_entity"]["name"] = (
+ entity_dict["name"]
+ )
+ data = {}
+ data["ftrackId"] = entity["id"]
+ data["entityType"] = entity_dict["entity_type_orig"]
+
+ for key, val in entity_dict.get("custom_attributes", []).items():
+ data[key] = val
+
+ for key, val in entity_dict.get("hier_attrs", []).items():
+ data[key] = val
+
+ if id == self.ft_project_id:
+ data["code"] = entity["name"]
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "project"
+
+ proj_schema = entity["project_schema"]
+ task_types = proj_schema["_task_type_schema"]["types"]
+ self.entities_dict[id]["final_entity"]["config"] = {
+ "tasks": [{"name": tt["name"]} for tt in task_types],
+ "apps": self.get_project_apps(data)
+ }
+ continue
+
+ ent_path_items = [ent["name"] for ent in entity["link"]]
+ parents = ent_path_items[1:len(ent_path_items)-1:]
+ hierarchy = ""
+ if len(parents) > 0:
+ hierarchy = os.path.sep.join(parents)
+
+ data["parents"] = parents
+ data["hierarchy"] = hierarchy
+ data["tasks"] = self.entities_dict[id].pop("tasks", [])
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "asset"
+
+ if not_set_ids:
+ self.log.debug((
+ "- Debug information: Filtering bug, there are empty dicts"
+ "in entities dict (functionality should not be affected) <{}>"
+ ).format("| ".join(not_set_ids)))
+ for id in not_set_ids:
+ self.entities_dict.pop(id)
+
+ def get_project_apps(self, proj_data):
+ apps = []
+ missing_toml_msg = "Missing config file for application"
+ error_msg = (
+ "Unexpected error happend during preparation of application"
+ )
+        for app in proj_data.get("applications") or []:
+ try:
+ toml_path = avalon.lib.which_app(app)
+ # TODO report
+ if not toml_path:
+                    self.log.warning(missing_toml_msg + ' "{}"'.format(app))
+ self.report_items["warning"][missing_toml_msg].append(app)
+ continue
+
+ apps.append({
+ "name": app,
+ "label": toml.load(toml_path)["label"]
+ })
+ except Exception:
+ # TODO report
+ self.report_items["warning"][error_msg].append(app)
+                self.log.warning((
+                    "Error happened while preparing application \"{}\""
+                ).format(app), exc_info=True)
+ return apps
+
+ def get_ent_path(self, ftrack_id):
+ ent_path = self._ent_pats_by_ftrack_id.get(ftrack_id)
+ if not ent_path:
+ entity = self.entities_dict[ftrack_id]["entity"]
+ ent_path = "/".join(
+ [ent["name"] for ent in entity["link"]]
+ )
+ self._ent_pats_by_ftrack_id[ftrack_id] = ent_path
+
+ return ent_path
+
+ def prepare_avalon_entities(self, ft_project_name):
+ self.log.debug((
+ "* Preparing avalon entities "
+ "(separate to Create, Update and Deleted groups)"
+ ))
+ # Avalon entities
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = ft_project_name
+ avalon_project = self.dbcon.find_one({"type": "project"})
+ avalon_entities = self.dbcon.find({"type": "asset"})
+ self.avalon_project = avalon_project
+ self.avalon_entities = avalon_entities
+
+ ftrack_avalon_mapper = {}
+ avalon_ftrack_mapper = {}
+ create_ftrack_ids = []
+ update_ftrack_ids = []
+
+ same_mongo_id = []
+ all_mongo_ids = {}
+ for ftrack_id, entity_dict in self.entities_dict.items():
+ mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
+ if not mongo_id:
+ continue
+ if mongo_id in all_mongo_ids:
+ same_mongo_id.append(mongo_id)
+ else:
+ all_mongo_ids[mongo_id] = []
+ all_mongo_ids[mongo_id].append(ftrack_id)
+
+ if avalon_project:
+ mongo_id = str(avalon_project["_id"])
+ ftrack_avalon_mapper[self.ft_project_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = self.ft_project_id
+ update_ftrack_ids.append(self.ft_project_id)
+ else:
+ create_ftrack_ids.append(self.ft_project_id)
+
+ # make it go hierarchically
+ prepare_queue = queue.Queue()
+
+ for child_id in self.entities_dict[self.ft_project_id]["children"]:
+ prepare_queue.put(child_id)
+
+ while not prepare_queue.empty():
+ ftrack_id = prepare_queue.get()
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ prepare_queue.put(child_id)
+
+ entity_dict = self.entities_dict[ftrack_id]
+ ent_path = self.get_ent_path(ftrack_id)
+
+ mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
+ av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
+ if av_ent_by_mongo_id:
+ av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
+ "ftrackId"
+ )
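+                # The same mongo id may be stored on several ftrack
+                # entities; decide ownership by ftrack id first, then by
+                # matching parents or name.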
+ is_right = False
+ else_match_better = False
+ if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id:
+ is_right = True
+
+ elif mongo_id not in same_mongo_id:
+ is_right = True
+
+ else:
+ ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id]
+ for _ftrack_id in ftrack_ids_with_same_mongo:
+ if _ftrack_id == av_ent_ftrack_id:
+ continue
+
+ _entity_dict = self.entities_dict[_ftrack_id]
+ _mongo_id = _entity_dict["avalon_attrs"][
+ self.id_cust_attr
+ ]
+ _av_ent_by_mongo_id = self.avalon_ents_by_id.get(
+ _mongo_id
+ )
+ _av_ent_ftrack_id = _av_ent_by_mongo_id.get(
+ "data", {}
+ ).get("ftrackId")
+ if _av_ent_ftrack_id == ftrack_id:
+ else_match_better = True
+ break
+
+ if not is_right and not else_match_better:
+ entity = entity_dict["entity"]
+ ent_path_items = [ent["name"] for ent in entity["link"]]
+ parents = ent_path_items[1:len(ent_path_items)-1:]
+ av_parents = av_ent_by_mongo_id["data"]["parents"]
+ if av_parents == parents:
+ is_right = True
+ else:
+ name = entity_dict["name"]
+ av_name = av_ent_by_mongo_id["name"]
+ if name == av_name:
+ is_right = True
+
+ if is_right:
+ self.log.debug(
+ "Existing (by MongoID) <{}>".format(ent_path)
+ )
+ ftrack_avalon_mapper[ftrack_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = ftrack_id
+ update_ftrack_ids.append(ftrack_id)
+ continue
+
+ mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not mongo_id:
+ mongo_id = self.avalon_ents_by_name.get(entity_dict["name"])
+ if mongo_id:
+ self.log.debug(
+ "Existing (by matching name) <{}>".format(ent_path)
+ )
+ else:
+ self.log.debug(
+ "Existing (by FtrackID in mongo) <{}>".format(ent_path)
+ )
+
+ if mongo_id:
+ ftrack_avalon_mapper[ftrack_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = ftrack_id
+ update_ftrack_ids.append(ftrack_id)
+ continue
+
+ self.log.debug("New <{}>".format(ent_path))
+ create_ftrack_ids.append(ftrack_id)
+
+ deleted_entities = []
+ for mongo_id in self.avalon_ents_by_id:
+ if mongo_id in avalon_ftrack_mapper:
+ continue
+ deleted_entities.append(mongo_id)
+
+ av_ent = self.avalon_ents_by_id[mongo_id]
+ av_ent_path_items = [p for p in av_ent["data"]["parents"]]
+ av_ent_path_items.append(av_ent["name"])
+ self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
+
+ self.ftrack_avalon_mapper = ftrack_avalon_mapper
+ self.avalon_ftrack_mapper = avalon_ftrack_mapper
+ self.create_ftrack_ids = create_ftrack_ids
+ self.update_ftrack_ids = update_ftrack_ids
+ self.deleted_entities = deleted_entities
+
+ self.log.debug((
+ "Ftrack -> Avalon comparison: New <{}> "
+ "| Existing <{}> | Deleted <{}>"
+ ).format(
+ len(create_ftrack_ids),
+ len(update_ftrack_ids),
+ len(deleted_entities)
+ ))
+
+ def filter_with_children(self, ftrack_id):
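+        # Remove the entity and its whole subtree from the processing dict.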
+ if ftrack_id not in self.entities_dict:
+ return
+ ent_dict = self.entities_dict[ftrack_id]
+ parent_id = ent_dict["parent_id"]
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ children_queue = queue.Queue()
+ children_queue.put(ftrack_id)
+ while not children_queue.empty():
+ _ftrack_id = children_queue.get()
+ entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
+ for child_id in entity_dict["children"]:
+ children_queue.put(child_id)
+
+ def prepare_changes(self):
+ self.log.debug("* Preparing changes for avalon/ftrack")
+ hierarchy_changing_ids = []
+ ignore_keys = collections.defaultdict(list)
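+        # Entities whose values can change in avalon are updated there;
+        # unchangeable entities (with published data) are reverted on the
+        # ftrack side instead.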
+
+ update_queue = queue.Queue()
+ for ftrack_id in self.update_ftrack_ids:
+ update_queue.put(ftrack_id)
+
+ while not update_queue.empty():
+ ftrack_id = update_queue.get()
+ if ftrack_id == self.ft_project_id:
+ changes = self.prepare_project_changes()
+ if changes:
+ self.updates[self.avalon_project_id] = changes
+ continue
+
+ ftrack_ent_dict = self.entities_dict[ftrack_id]
+
+ # *** check parents
+ parent_check = False
+
+ ftrack_parent_id = ftrack_ent_dict["parent_id"]
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+ avalon_parent_id = avalon_entity["data"]["visualParent"]
+ if avalon_parent_id is not None:
+ avalon_parent_id = str(avalon_parent_id)
+
+ ftrack_parent_mongo_id = self.ftrack_avalon_mapper[
+ ftrack_parent_id
+ ]
+
+ # if parent is project
+ if (ftrack_parent_mongo_id == avalon_parent_id) or (
+ ftrack_parent_id == self.ft_project_id and
+ avalon_parent_id is None
+ ):
+ parent_check = True
+
+ # check name
+ ftrack_name = ftrack_ent_dict["name"]
+ avalon_name = avalon_entity["name"]
+ name_check = ftrack_name == avalon_name
+
+ # IDEAL STATE: both parent and name check passed
+ if parent_check and name_check:
+ continue
+
+ # If entity is changeable then change values of parent or name
+ if self.changeability_by_mongo_id[avalon_id]:
+ # TODO logging
+ if not parent_check:
+ if ftrack_parent_mongo_id == str(self.avalon_project_id):
+ new_parent_name = self.entities_dict[
+ self.ft_project_id]["name"]
+ new_parent_id = None
+ else:
+ new_parent_name = self.avalon_ents_by_id[
+ ftrack_parent_mongo_id]["name"]
+ new_parent_id = ObjectId(ftrack_parent_mongo_id)
+
+                    if (
+                        avalon_parent_id is None or
+                        avalon_parent_id == str(self.avalon_project_id)
+                    ):
+                        old_parent_name = self.entities_dict[
+                            self.ft_project_id]["name"]
+                    else:
+                        old_parent_name = self.avalon_ents_by_id[
+                            avalon_parent_id]["name"]
+
+ self.updates[avalon_id]["data"] = {
+ "visualParent": new_parent_id
+ }
+ ignore_keys[ftrack_id].append("data.visualParent")
+ self.log.debug((
+ "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\""
+ ).format(avalon_name, old_parent_name, new_parent_name))
+
+ if not name_check:
+ self.updates[avalon_id]["name"] = ftrack_name
+ ignore_keys[ftrack_id].append("name")
+ self.log.debug(
+ "Avalon entity \"{}\" was renamed to \"{}\"".format(
+ avalon_name, ftrack_name
+ )
+ )
+ continue
+
+ # parents and hierarchy must be recalculated
+ hierarchy_changing_ids.append(ftrack_id)
+
+ # Parent is project if avalon_parent_id is set to None
+ if avalon_parent_id is None:
+ avalon_parent_id = str(self.avalon_project_id)
+
+ if not name_check:
+ ent_path = self.get_ent_path(ftrack_id)
+ # TODO report
+ # TODO logging
+ self.entities_dict[ftrack_id]["name"] = avalon_name
+ self.entities_dict[ftrack_id]["entity"]["name"] = (
+ avalon_name
+ )
+ self.entities_dict[ftrack_id]["final_entity"]["name"] = (
+ avalon_name
+ )
+ self.log.warning("Name was changed back to {} <{}>".format(
+ avalon_name, ent_path
+ ))
+ self._ent_pats_by_ftrack_id.pop(ftrack_id, None)
+ msg = (
+ " It is not possible to change"
+ " the name of an entity or it's parents, "
+ " if it already contained published data."
+ )
+ self.report_items["warning"][msg].append(ent_path)
+
+            # skip parent processing if hierarchy didn't change
+ if parent_check:
+ continue
+
+            # Logic for when parenting (hierarchy) has changed but should not have
+ old_ftrack_parent_id = self.avalon_ftrack_mapper.get(
+ avalon_parent_id
+ )
+
+            # If the last ftrack parent id from the mongo entity exists then
+            # just remap parent_id on the entity
+ if old_ftrack_parent_id:
+ # TODO report
+ # TODO logging
+ ent_path = self.get_ent_path(ftrack_id)
+ msg = (
+ " It is not possible"
+ " to change the hierarchy of an entity or it's parents,"
+ " if it already contained published data."
+ )
+ self.report_items["warning"][msg].append(ent_path)
+ self.log.warning((
+ " Entity contains published data so it was moved"
+ " back to it's original hierarchy <{}>"
+ ).format(ent_path))
+ self.entities_dict[ftrack_id]["entity"]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[ftrack_id]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[old_ftrack_parent_id][
+ "children"
+ ].append(ftrack_id)
+
+ continue
+
+ old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id)
+ if not old_parent_ent:
+ old_parent_ent = self.avalon_archived_by_id.get(
+ avalon_parent_id
+ )
+
+ # TODO report
+ # TODO logging
+ if not old_parent_ent:
+ self.log.warning((
+ "Parent entity was not found by id"
+ " - Trying to find by parent name"
+ ))
+ ent_path = self.get_ent_path(ftrack_id)
+
+ parents = avalon_entity["data"]["parents"]
+ parent_name = parents[-1]
+ matching_entity_id = None
+ for id, entity_dict in self.entities_dict.items():
+ if entity_dict["name"] == parent_name:
+ matching_entity_id = id
+ break
+
+ if matching_entity_id is None:
+ # TODO logging
+ # TODO report (turn off auto-sync?)
+ self.log.error((
+ "The entity contains published data but it was moved to"
+ " a different place in the hierarchy and it's previous"
+ " parent cannot be found."
+ " It's impossible to solve this programmatically <{}>"
+ ).format(ent_path))
+ msg = (
+ " Hierarchy of an entity" " can't be changed due to published data and missing"
+ " previous parent"
+ )
+ self.report_items["error"][msg].append(ent_path)
+ self.filter_with_children(ftrack_id)
+ continue
+
+ matching_ent_dict = self.entities_dict.get(matching_entity_id)
+ match_ent_parents = matching_ent_dict.get(
+ "final_entity", {}).get(
+ "data", {}).get(
+ "parents", ["__NOT_SET__"]
+ )
+ # TODO logging
+ # TODO report
+ if (
+ len(match_ent_parents) >= len(parents) or
+ match_ent_parents[:-1] != parents
+ ):
+ ent_path = self.get_ent_path(ftrack_id)
+ self.log.error((
+ "The entity contains published data but it was moved to"
+ " a different place in the hierarchy and it's previous"
+ " parents were moved too."
+ " It's impossible to solve this programmatically <{}>"
+ ).format(ent_path))
+                    msg = (
+                        " Hierarchy of an entity"
+                        " can't be changed due to published data and a"
+                        " scrambled hierarchy"
+                    )
+                    self.report_items["error"][msg].append(ent_path)
+                    continue
+
+ old_parent_ent = matching_ent_dict["final_entity"]
+
+ parent_id = self.ft_project_id
+ entities_to_create = []
+ # TODO logging
+ self.log.warning(
+ "Ftrack entities must be recreated because they were deleted,"
+ " but they contain published data."
+ )
+
+ _avalon_ent = old_parent_ent
+
+ self.updates[avalon_parent_id] = {"type": "asset"}
+ success = True
+ while True:
+ _vis_par = _avalon_ent["data"]["visualParent"]
+ _name = _avalon_ent["name"]
+ if _name in self.all_ftrack_names:
+ av_ent_path_items = _avalon_ent["data"]["parents"]
+ av_ent_path_items.append(_name)
+ av_ent_path = "/".join(av_ent_path_items)
+ # TODO report
+ # TODO logging
+ self.log.error((
+ "Can't recreate the entity in Ftrack because an entity" " with the same name already exists in a different"
+ " place in the hierarchy <{}>"
+ ).format(av_ent_path))
+ msg = (
+ " Hierarchy of an entity"
+ " can't be changed. I contains published data and it's" " previous parent had a name, that is duplicated at a "
+ " different hierarchy level"
+ )
+ self.report_items["error"][msg].append(av_ent_path)
+ self.filter_with_children(ftrack_id)
+ success = False
+ break
+
+ entities_to_create.append(_avalon_ent)
+ if _vis_par is None:
+ break
+
+ _vis_par = str(_vis_par)
+ _mapped = self.avalon_ftrack_mapper.get(_vis_par)
+ if _mapped:
+ parent_id = _mapped
+ break
+
+ _avalon_ent = self.avalon_ents_by_id.get(_vis_par)
+ if not _avalon_ent:
+ _avalon_ent = self.avalon_archived_by_id.get(_vis_par)
+
+ if success is False:
+ continue
+
+ new_entity_id = None
+ for av_entity in reversed(entities_to_create):
+ new_entity_id = self.create_ftrack_ent_from_avalon_ent(
+ av_entity, parent_id
+ )
+ update_queue.put(new_entity_id)
+
+ if new_entity_id:
+ ftrack_ent_dict["entity"]["parent_id"] = new_entity_id
+
+ if hierarchy_changing_ids:
+ self.reload_parents(hierarchy_changing_ids)
+
+ for ftrack_id in self.update_ftrack_ids:
+ if ftrack_id == self.ft_project_id:
+ continue
+
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+
+ avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
+ if (
+ self.id_cust_attr not in avalon_attrs or
+ avalon_attrs[self.id_cust_attr] != avalon_id
+ ):
+ configuration_id = self.entities_dict[ftrack_id][
+ "avalon_attrs_id"][self.id_cust_attr]
+
+ _entity_key = collections.OrderedDict({
+ "configuration_id": configuration_id,
+ "entity_id": ftrack_id
+ })
+
+ self.session.recorded_operations.push(
+ fa_session.ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ fa_session.ftrack_api.symbol.NOT_SET,
+ avalon_id
+ )
+ )
+ # check rest of data
+ data_changes = self.compare_dict(
+ self.entities_dict[ftrack_id]["final_entity"],
+ avalon_entity,
+ ignore_keys[ftrack_id]
+ )
+ if data_changes:
+ self.updates[avalon_id] = self.merge_dicts(
+ data_changes,
+ self.updates[avalon_id]
+ )
+
+ def synchronize(self):
+ self.log.debug("* Synchronization begins")
+ avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
+ if avalon_project_id:
+ self.avalon_project_id = ObjectId(avalon_project_id)
+
+ # remove filtered ftrack ids from create/update list
+ for ftrack_id in self.all_filtered_entities:
+ if ftrack_id in self.create_ftrack_ids:
+ self.create_ftrack_ids.remove(ftrack_id)
+ elif ftrack_id in self.update_ftrack_ids:
+ self.update_ftrack_ids.remove(ftrack_id)
+
+ self.log.debug("* Processing entities for archivation")
+ self.delete_entities()
+
+ self.log.debug("* Processing new entities")
+ # Create not created entities
+ for ftrack_id in self.create_ftrack_ids:
+ # CHECK it is possible that entity was already created
+            # because it is a parent of another entity processed first
+ if ftrack_id in self.ftrack_avalon_mapper:
+ continue
+ self.create_avalon_entity(ftrack_id)
+
+ if len(self.create_list) > 0:
+ self.dbcon.insert_many(self.create_list)
+
+ self.session.commit()
+
+ self.log.debug("* Processing entities for update")
+ self.prepare_changes()
+ self.update_entities()
+ self.session.commit()
+
+ def create_avalon_entity(self, ftrack_id):
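+        # Build the avalon asset document for a new ftrack entity and
+        # either queue it for insertion or re-use an archived document.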
+ if ftrack_id == self.ft_project_id:
+ self.create_avalon_project()
+ return
+
+ entity_dict = self.entities_dict[ftrack_id]
+ parent_ftrack_id = entity_dict["parent_id"]
+ avalon_parent = None
+ if parent_ftrack_id != self.ft_project_id:
+ avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id)
+ # if not avalon_parent:
+ # self.create_avalon_entity(parent_ftrack_id)
+ # avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id]
+ avalon_parent = ObjectId(avalon_parent)
+
+ # avalon_archived_by_id avalon_archived_by_name
+ current_id = (
+ entity_dict["avalon_attrs"].get(self.id_cust_attr) or ""
+ ).strip()
+ mongo_id = current_id
+ name = entity_dict["name"]
+
+        # Check if an archived asset exists in mongo - by ID
+ unarchive = False
+ unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name)
+ if unarchive_id is not None:
+ unarchive = True
+ mongo_id = unarchive_id
+
+ item = entity_dict["final_entity"]
+ try:
+ new_id = ObjectId(mongo_id)
+ if mongo_id in self.avalon_ftrack_mapper:
+ new_id = ObjectId()
+ except InvalidId:
+ new_id = ObjectId()
+
+ item["_id"] = new_id
+ item["parent"] = self.avalon_project_id
+ item["schema"] = self.entity_schemas["asset"]
+ item["data"]["visualParent"] = avalon_parent
+
+ new_id_str = str(new_id)
+ self.ftrack_avalon_mapper[ftrack_id] = new_id_str
+ self.avalon_ftrack_mapper[new_id_str] = ftrack_id
+
+ self._avalon_ents_by_id[new_id_str] = item
+ self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str
+ self._avalon_ents_by_name[item["name"]] = new_id_str
+
+ if current_id != new_id_str:
+ # store mongo id to ftrack entity
+ configuration_id = self.entities_dict[ftrack_id][
+ "avalon_attrs_id"
+ ][self.id_cust_attr]
+ _entity_key = collections.OrderedDict({
+ "configuration_id": configuration_id,
+ "entity_id": ftrack_id
+ })
+
+ self.session.recorded_operations.push(
+ fa_session.ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ fa_session.ftrack_api.symbol.NOT_SET,
+ new_id_str
+ )
+ )
+
+ if unarchive is False:
+ self.create_list.append(item)
+ return
+ # If unarchive then replace entity data in database
+ self.dbcon.replace_one({"_id": new_id}, item)
+ self.remove_from_archived(mongo_id)
+ av_ent_path_items = item["data"]["parents"]
+ av_ent_path_items.append(item["name"])
+ av_ent_path = "/".join(av_ent_path_items)
+ self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
+
+ def check_unarchivation(self, ftrack_id, mongo_id, name):
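+        # Return the mongo id of an archived entity that can be re-used
+        # for this ftrack entity (matched by id first, then by name and
+        # parents), or None when nothing matches.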
+ archived_by_id = self.avalon_archived_by_id.get(mongo_id)
+ archived_by_name = self.avalon_archived_by_name.get(name)
+
+ # if not found in archived then skip
+ if not archived_by_id and not archived_by_name:
+ return None
+
+ entity_dict = self.entities_dict[ftrack_id]
+
+ if archived_by_id:
+            # if it is changeable then unarchive (nothing to check here)
+ if self.changeability_by_mongo_id[mongo_id]:
+ return mongo_id
+
+ # TODO replace `__NOTSET__` with custom None constant
+ archived_parent_id = archived_by_id["data"].get(
+ "visualParent", "__NOTSET__"
+ )
+ archived_parents = archived_by_id["data"].get("parents")
+ archived_name = archived_by_id["name"]
+
+ if (
+ archived_name != entity_dict["name"] or
+ archived_parents != entity_dict["final_entity"]["data"][
+ "parents"
+ ]
+ ):
+ return None
+
+ return mongo_id
+
+        # First check if any archived entity has the same parents
+ for archived in archived_by_name:
+ mongo_id = str(archived["_id"])
+ archived_parents = archived.get("data", {}).get("parents")
+ if (
+ archived_parents == entity_dict["final_entity"]["data"][
+ "parents"
+ ]
+ ):
+ return mongo_id
+
+        # Then try to find the closest match to the current ftrack entity
+ first_changeable = None
+ for archived in archived_by_name:
+ mongo_id = str(archived["_id"])
+ if not self.changeability_by_mongo_id[mongo_id]:
+ continue
+
+ if first_changeable is None:
+ first_changeable = mongo_id
+
+ ftrack_parent_id = entity_dict["parent_id"]
+ map_ftrack_parent_id = self.ftrack_avalon_mapper.get(
+ ftrack_parent_id
+ )
+
+ # TODO replace `__NOTSET__` with custom None constant
+ archived_parent_id = archived.get("data", {}).get(
+ "visualParent", "__NOTSET__"
+ )
+ if archived_parent_id is not None:
+ archived_parent_id = str(archived_parent_id)
+
+            # skip if parent is archived - how would this be possible?
+ parent_entity = self.avalon_ents_by_id.get(archived_parent_id)
+ if (
+ parent_entity and (
+ map_ftrack_parent_id is not None and
+ map_ftrack_parent_id == str(parent_entity["_id"])
+ )
+ ):
+ return mongo_id
+        # Lastly return the first changeable entity with the same name (or None)
+ return first_changeable
+
+ def create_avalon_project(self):
+ project_item = self.entities_dict[self.ft_project_id]["final_entity"]
+ mongo_id = (
+ self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ self.id_cust_attr
+ ) or ""
+ ).strip()
+
+ try:
+ new_id = ObjectId(mongo_id)
+ except InvalidId:
+ new_id = ObjectId()
+
+ project_item["_id"] = new_id
+ project_item["parent"] = None
+ project_item["schema"] = self.entity_schemas["project"]
+ project_item["config"]["schema"] = self.entity_schemas["config"]
+ project_item["config"]["template"] = self.get_avalon_project_template()
+
+ self.ftrack_avalon_mapper[self.ft_project_id] = new_id
+ self.avalon_ftrack_mapper[new_id] = self.ft_project_id
+
+ self.avalon_project_id = new_id
+
+ self._avalon_ents_by_id[str(new_id)] = project_item
+ self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
+ self._avalon_ents_by_name[project_item["name"]] = str(new_id)
+
+ self.create_list.append(project_item)
+
+ # store mongo id to ftrack entity
+ entity = self.entities_dict[self.ft_project_id]["entity"]
+ entity["custom_attributes"][self.id_cust_attr] = str(new_id)
+
+ def get_avalon_project_template(self):
+ """Get avalon template
+ Returns:
+ dictionary with templates
+ """
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ templates = Anatomy(project_name).templates
+ return {
+ "workfile": templates["avalon"]["workfile"],
+ "work": templates["avalon"]["work"],
+ "publish": templates["avalon"]["publish"]
+ }
+
+ def _bubble_changeability(self, unchangeable_ids):
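+        """Mark the given mongo ids and all their parents as unchangeable.
+
+        Entities with published subsets, and every ancestor up to the
+        project, must not be renamed or moved, so the False flag bubbles
+        up the hierarchy. Subsets whose asset does not exist anymore are
+        collected and removed afterwards.
+        """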
+ unchangeable_queue = queue.Queue()
+ for entity_id in unchangeable_ids:
+ unchangeable_queue.put((entity_id, False))
+
+ processed_parents_ids = []
+ subsets_to_remove = []
+ while not unchangeable_queue.empty():
+ entity_id, child_is_archived = unchangeable_queue.get()
+ # skip if already processed
+ if entity_id in processed_parents_ids:
+ continue
+
+ entity = self.avalon_ents_by_id.get(entity_id)
+            # if entity is not archived but an unchangeable child was,
+            # then skip - archived entities should not affect live ones?
+ if entity and child_is_archived:
+ continue
+
+ # set changeability of current entity to False
+ self._changeability_by_mongo_id[entity_id] = False
+ processed_parents_ids.append(entity_id)
+            # if entity was not found it is probably archived
+            if not entity:
+                entity = self.avalon_archived_by_id.get(entity_id)
+                child_is_archived = True
+
+            if not entity:
+                # entity is still not found - it is a subset without parent
+ if entity_id in unchangeable_ids:
+ subsets_to_remove.append(entity_id)
+ else:
+ # TODO logging - What is happening here?
+ self.log.warning((
+ "Avalon contains entities without valid parents that"
+ " lead to Project (should not cause errors)"
+ " - MongoId <{}>"
+ ).format(str(entity_id)))
+ continue
+
+ # skip if parent is project
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is None:
+ continue
+ unchangeable_queue.put((str(parent_id), child_is_archived))
+
+ self._delete_subsets_without_asset(subsets_to_remove)
+
+ def _delete_subsets_without_asset(self, not_existing_parents):
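+        """Delete subsets, and their versions and representations, whose
+        parent asset does not exist anymore.
+        """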
+ subset_ids = []
+ version_ids = []
+ repre_ids = []
+ to_delete = []
+
+ for parent_id in not_existing_parents:
+ subsets = self.subsets_by_parent_id.get(parent_id)
+ if not subsets:
+ continue
+ for subset in subsets:
+ if subset.get("type") != "subset":
+ continue
+ subset_ids.append(subset["_id"])
+
+        # `find` returns a cursor which is always truthy - materialize
+        # the results first so the emptiness checks below actually work
+        db_subsets = list(self.dbcon.find({
+            "_id": {"$in": subset_ids},
+            "type": "subset"
+        }))
+        if not db_subsets:
+            return
+
+        db_versions = list(self.dbcon.find({
+            "parent": {"$in": subset_ids},
+            "type": "version"
+        }))
+        if db_versions:
+            version_ids = [ver["_id"] for ver in db_versions]
+
+        db_repres = list(self.dbcon.find({
+            "parent": {"$in": version_ids},
+            "type": "representation"
+        }))
+        if db_repres:
+            repre_ids = [repre["_id"] for repre in db_repres]
+
+ to_delete.extend(subset_ids)
+ to_delete.extend(version_ids)
+ to_delete.extend(repre_ids)
+
+ self.dbcon.delete_many({"_id": {"$in": to_delete}})
+
+ # Probably deprecated
+ def _check_changeability(self, parent_id=None):
+ for entity in self.avalon_ents_by_parent_id[parent_id]:
+ mongo_id = str(entity["_id"])
+ is_changeable = self._changeability_by_mongo_id.get(mongo_id)
+ if is_changeable is not None:
+ continue
+
+ self._check_changeability(mongo_id)
+ is_changeable = True
+ for child in self.avalon_ents_by_parent_id[parent_id]:
+ if not self._changeability_by_mongo_id[str(child["_id"])]:
+ is_changeable = False
+ break
+
+ if is_changeable is True:
+ is_changeable = (mongo_id in self.subsets_by_parent_id)
+ self._changeability_by_mongo_id[mongo_id] = is_changeable
+
+ def update_entities(self):
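+        """Flush accumulated `self.updates` to mongo in one bulk write."""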
+ mongo_changes_bulk = []
+ for mongo_id, changes in self.updates.items():
+ filter = {"_id": ObjectId(mongo_id)}
+ change_data = self.from_dict_to_set(changes)
+ mongo_changes_bulk.append(UpdateOne(filter, change_data))
+
+ if not mongo_changes_bulk:
+ # TODO LOG
+ return
+ self.dbcon.bulk_write(mongo_changes_bulk)
+
+ def from_dict_to_set(self, data):
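+        """Convert a nested dict of changes to a flat mongo `$set` update.
+
+        Illustrative example (values are made up):
+        >>> self.from_dict_to_set({"name": "sh010", "data": {"fps": 25}})
+        {'$set': {'name': 'sh010', 'data.fps': 25}}
+        """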
+ result = {"$set": {}}
+ dict_queue = queue.Queue()
+ dict_queue.put((None, data))
+
+ while not dict_queue.empty():
+ _key, _data = dict_queue.get()
+ for key, value in _data.items():
+ new_key = key
+ if _key is not None:
+ new_key = "{}.{}".format(_key, key)
+
+ if not isinstance(value, dict):
+ result["$set"][new_key] = value
+ continue
+ dict_queue.put((new_key, value))
+ return result
+
+ def reload_parents(self, hierarchy_changing_ids):
+ parents_queue = queue.Queue()
+ parents_queue.put((self.ft_project_id, [], False))
+ while not parents_queue.empty():
+ ftrack_id, parent_parents, changed = parents_queue.get()
+ _parents = parent_parents.copy()
+ if ftrack_id not in hierarchy_changing_ids and not changed:
+ if ftrack_id != self.ft_project_id:
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+ continue
+
+ changed = True
+            parents = list(_parents)
+ hierarchy = "/".join(parents)
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["parents"] = parents
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["hierarchy"] = hierarchy
+
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+
+ if ftrack_id in self.create_ftrack_ids:
+ mongo_id = self.ftrack_avalon_mapper[ftrack_id]
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+ self.updates[mongo_id]["data"]["parents"] = parents
+ self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
+
+ def prepare_project_changes(self):
+ ftrack_ent_dict = self.entities_dict[self.ft_project_id]
+ ftrack_entity = ftrack_ent_dict["entity"]
+ avalon_code = self.avalon_project["data"]["code"]
+        # TODO Is it possible to sync if the full name was changed?
+ # if ftrack_ent_dict["name"] != self.avalon_project["name"]:
+ # ftrack_entity["full_name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["final_entity"][
+ # "name"
+ # ] = avalon_name
+
+ # TODO logging
+ # TODO report
+        # TODO Can this happen? Is it possible to change the project code?
+ if ftrack_entity["name"] != avalon_code:
+ ftrack_entity["name"] = avalon_code
+ self.entities_dict[self.ft_project_id]["final_entity"]["data"][
+ "code"
+ ] = avalon_code
+ self.session.commit()
+ sub_msg = (
+ "Project code was changed back to \"{}\"".format(avalon_code)
+ )
+ msg = (
+ "It is not possible to change"
+ " project code after synchronization"
+ )
+ self.report_items["warning"][msg] = sub_msg
+ self.log.warning(sub_msg)
+
+ return self.compare_dict(
+ self.entities_dict[self.ft_project_id]["final_entity"],
+ self.avalon_project
+ )
+
+ def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
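+        """Return a nested dict of keys from `dict_new` that are missing
+        in or differ from `dict_old`.
+
+        Illustrative example (values are made up):
+        >>> self.compare_dict(
+        ...     {"data": {"fps": 25, "frameStart": 1001}},
+        ...     {"data": {"fps": 24, "frameStart": 1001}}
+        ... )
+        {'data': {'fps': 25}}
+        """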
+        # _ignore_keys may contain nested keys like "data.visualParent"
+ changes = {}
+ ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) == 1:
+ ignore_keys.append(key_items[0])
+
+ for key, value in dict_new.items():
+ if key in ignore_keys:
+ continue
+
+ if key not in dict_old:
+ changes[key] = value
+ continue
+
+ if isinstance(value, dict):
+ if not isinstance(dict_old[key], dict):
+ changes[key] = value
+ continue
+
+ _new_ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) <= 1:
+ continue
+ _new_ignore_keys.append(".".join(key_items[1:]))
+
+ _changes = self.compare_dict(
+ value, dict_old[key], _new_ignore_keys
+ )
+ if _changes:
+ changes[key] = _changes
+ continue
+
+ if value != dict_old[key]:
+ changes[key] = value
+
+ return changes
+
+ def merge_dicts(self, dict_new, dict_old):
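+        """Recursively merge `dict_new` into `dict_old` and return it.
+
+        Illustrative example (values are made up):
+        >>> self.merge_dicts({"a": 1, "b": {"c": 2}}, {"b": {"d": 3}})
+        {'b': {'d': 3, 'c': 2}, 'a': 1}
+        """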
+ for key, value in dict_new.items():
+ if key not in dict_old:
+ dict_old[key] = value
+ continue
+
+ if isinstance(value, dict):
+ dict_old[key] = self.merge_dicts(value, dict_old[key])
+ continue
+
+ dict_old[key] = value
+
+ return dict_old
+
+ def delete_entities(self):
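+        """Process entities that were deleted on the ftrack side.
+
+        Changeable (unpublished) entities are archived in avalon.
+        Entities with published content are either remapped to a newly
+        created ftrack entity with the same name and parents, or
+        recreated in ftrack so the published data stays reachable.
+        """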
+ if not self.deleted_entities:
+ return
+        # Order ids so a child is never processed before its parent
+        deleted_entities = []
+        _deleted_entities = list(self.deleted_entities)
+
+ while True:
+ if not _deleted_entities:
+ break
+ _ready = []
+ for mongo_id in _deleted_entities:
+ ent = self.avalon_ents_by_id[mongo_id]
+ vis_par = ent["data"]["visualParent"]
+ if (
+ vis_par is not None and
+ str(vis_par) in self.deleted_entities
+ ):
+ continue
+ _ready.append(mongo_id)
+
+            for mongo_id in _ready:
+                deleted_entities.append(mongo_id)
+                _deleted_entities.remove(mongo_id)
+
+ delete_ids = []
+ for mongo_id in deleted_entities:
+ # delete if they are deletable
+ if self.changeability_by_mongo_id[mongo_id]:
+ delete_ids.append(ObjectId(mongo_id))
+ continue
+
+ # check if any new created entity match same entity
+ # - name and parents must match
+ deleted_entity = self.avalon_ents_by_id[mongo_id]
+ name = deleted_entity["name"]
+ parents = deleted_entity["data"]["parents"]
+ similar_ent_id = None
+ for ftrack_id in self.create_ftrack_ids:
+ _ent_final = self.entities_dict[ftrack_id]["final_entity"]
+ if _ent_final["name"] != name:
+ continue
+ if _ent_final["data"]["parents"] != parents:
+ continue
+
+ # If in create is "same" then we can "archive" current
+ # since will be unarchived in create method
+ similar_ent_id = ftrack_id
+ break
+
+            # If a similar entity (same name and parents) is in the create
+            # list then just move it from create to update
+ if similar_ent_id is not None:
+ self.create_ftrack_ids.remove(similar_ent_id)
+ self.update_ftrack_ids.append(similar_ent_id)
+ self.avalon_ftrack_mapper[mongo_id] = similar_ent_id
+ self.ftrack_avalon_mapper[similar_ent_id] = mongo_id
+ continue
+
+ found_by_name_id = None
+ for ftrack_id, ent_dict in self.entities_dict.items():
+ if not ent_dict.get("name"):
+ continue
+
+ if name == ent_dict["name"]:
+ found_by_name_id = ftrack_id
+ break
+
+ if found_by_name_id is not None:
+                # * THESE conditions are too complex to implement in the
+                #   first stage - probably not solvable if this happens
+ # if found_by_name_id in self.create_ftrack_ids:
+ # # reparent entity of the new one create?
+ # pass
+ #
+ # elif found_by_name_id in self.update_ftrack_ids:
+ # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id]
+ #
+ # ent_dict = self.entities_dict[found_by_name_id]
+
+                # TODO report - CRITICAL entity with the same name already
+                # exists in a different hierarchy - can't recreate entity
+ continue
+
+            # convert to str only after the None check - `str(None)` would
+            # never match the None branch below
+            _vis_parent = deleted_entity["data"]["visualParent"]
+            if _vis_parent is None:
+                _vis_parent = self.avalon_project_id
+            _vis_parent = str(_vis_parent)
+ ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent]
+ self.create_ftrack_ent_from_avalon_ent(
+ deleted_entity, ftrack_parent_id
+ )
+
+ filter = {"_id": {"$in": delete_ids}, "type": "asset"}
+ self.dbcon.update_many(filter, {"$set": {"type": "archived_asset"}})
+
+ def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id):
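+        """Recreate a deleted ftrack entity from its avalon counterpart.
+
+        Used when the deleted entity, or one of its children, contains
+        published data. Custom attributes and the avalon mongo id are
+        copied back to the new ftrack entity.
+        """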
+ new_entity = None
+ parent_entity = self.entities_dict[parent_id]["entity"]
+
+ _name = av_entity["name"]
+ _type = av_entity["data"].get("entityType", "folder")
+
+ self.log.debug((
+ "Re-ceating deleted entity {} <{}>"
+ ).format(_name, _type))
+
+ new_entity = self.session.create(_type, {
+ "name": _name,
+ "parent": parent_entity
+ })
+
+        final_entity = dict(av_entity)
+
+ if final_entity.get("type") != "asset":
+ final_entity["type"] = "asset"
+
+ new_entity_id = new_entity["id"]
+ new_entity_data = {
+ "entity": new_entity,
+ "parent_id": parent_id,
+ "entity_type": _type.lower(),
+ "entity_type_orig": _type,
+ "name": _name,
+ "final_entity": final_entity
+ }
+ for k, v in new_entity_data.items():
+ self.entities_dict[new_entity_id][k] = v
+
+        p_children = self.entities_dict[parent_id]["children"]
+        if new_entity_id not in p_children:
+            p_children.append(new_entity_id)
+
+ cust_attr, hier_attrs = self.get_avalon_attr()
+ for _attr in cust_attr:
+ key = _attr["key"]
+ if key not in av_entity["data"]:
+ continue
+
+ if key not in new_entity["custom_attributes"]:
+ continue
+
+ value = av_entity["data"][key]
+ if not value:
+ continue
+
+ new_entity["custom_attributes"][key] = value
+
+ av_entity_id = str(av_entity["_id"])
+ new_entity["custom_attributes"][self.id_cust_attr] = av_entity_id
+
+ self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
+ self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
+
+ self.session.commit()
+
+ ent_path = self.get_ent_path(new_entity_id)
+ msg = (
+ "Deleted entity was recreated because it or its children"
+ " contain published data"
+ )
+
+ self.report_items["info"][msg].append(ent_path)
+
+ return new_entity_id
+
+ def regex_duplicate_interface(self):
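+        """Build interface label items describing invalid and duplicated
+        entity names collected during `duplicity_regex_check`.
+        """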
+ items = []
+ if self.failed_regex or self.tasks_failed_regex:
+ subtitle = "Entity names contain prohibited symbols:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: You can use Letters( a-Z ),"
+ " Numbers( 0-9 ) and Underscore( _ )
"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+ "value": '{}
'.format("
".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+ for name, ids in self.tasks_failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## Task: {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ ent_path = "/".join([ent_path, name])
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+ "value": '{}
'.format("
".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+ self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
+
+ if self.duplicates:
+ subtitle = "Duplicated entity names:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: It is not allowed to use the same name"
+ " for multiple entities in the same project
"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.duplicates.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+ "value": '{}
'.format("
".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ", ".join(paths)))
+
+ self.log.warning("{}{}".format(subtitle, ", ".join(log_msgs)))
+
+ return items
+
+ def get_avalon_attr(self, split_hierarchical=True):
+ custom_attributes = []
+ hier_custom_attributes = []
+ cust_attrs_query = (
+ "select id, entity_type, object_type_id, is_hierarchical, default"
+ " from CustomAttributeConfiguration"
+ " where group.name = \"avalon\""
+ )
+ all_avalon_attr = self.session.query(cust_attrs_query).all()
+ for cust_attr in all_avalon_attr:
+ if split_hierarchical and cust_attr["is_hierarchical"]:
+ hier_custom_attributes.append(cust_attr)
+ continue
+
+ custom_attributes.append(cust_attr)
+
+ if split_hierarchical:
+ # return tuple
+ return custom_attributes, hier_custom_attributes
+
+ return custom_attributes
+
+ def report(self):
+ items = []
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ title = "Synchronization report ({}):".format(project_name)
+
+ keys = ["error", "warning", "info"]
+ for key in keys:
+ subitems = []
+ if key == "warning":
+ for _item in self.regex_duplicate_interface():
+ subitems.append(_item)
+
+ for msg, _items in self.report_items[key].items():
+ if not _items:
+ continue
+
+ subitems.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+ if isinstance(_items, str):
+ _items = [_items]
+ subitems.append({
+ "type": "label",
+ "value": '{}
'.format("
".join(_items))
+ })
+
+ if items and subitems:
+ items.append(self.report_splitter)
+
+ items.extend(subitems)
+
+ return {
+ "items": items,
+ "title": title,
+ "success": False,
+ "message": "Synchronization Finished"
+ }
+
+
+class SyncToAvalonLocal(BaseAction):
+ """
+ Synchronizing data action - from Ftrack to Avalon DB
+
+    Stores all information about each entity.
+ - Name(string) - Most important information = identifier of entity
+ - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
+ - Data(dictionary):
+ - VisualParent(ObjectId) - Avalon Id of parent asset
+ - Parents(array of string) - All parent names except project
+ - Tasks(array of string) - Tasks on asset
+ - FtrackId(string)
+ - entityType(string) - entity's type on Ftrack
+ * All Custom attributes in group 'Avalon'
+ - custom attributes that start with 'avalon_' are skipped
+
+    * This information is stored for all entities in the whole project.
+
+ Avalon ID of asset is stored to Ftrack
+ - Custom attribute 'avalon_mongo_id'.
+        - this action DOES NOT create the Custom attribute if it is missing
+ - run 'Create Custom Attributes' action
+ - or do it manually (Not recommended)
+ """
+
+ #: Action identifier.
+ identifier = "sync.to.avalon.local"
+ #: Action label.
+ label = "Pype Admin"
+ #: Action variant
+ variant = "- Sync To Avalon (Local)"
+ #: Action description.
+ description = "Send data from Ftrack to Avalon"
+ #: priority
+ priority = 200
+ #: roles that are allowed to register this action
+ role_list = ["Pypeclub"]
+ icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
+ os.environ.get('PYPE_STATICS_SERVER', '')
+ )
+
+ def discover(self, session, entities, event):
+ ''' Validation '''
+ for ent in event["data"]["selection"]:
+            # Run only for project ("show") or typed context ("task") items
+ if ent["entityType"].lower() in ["show", "task"]:
+ return True
+ return False
+
+ def launch(self, session, in_entities, event):
+ time_start = time.time()
+
+ self.show_message(event, "Synchronization - Preparing data", True)
+ # Get ftrack project
+ if in_entities[0].entity_type.lower() == "project":
+ ft_project_name = in_entities[0]["full_name"]
+ else:
+ ft_project_name = in_entities[0]["project"]["full_name"]
+
+ try:
+ entities_factory = SyncEntitiesFactory(
+ self.log, session, ft_project_name
+ )
+ time_1 = time.time()
+
+ entities_factory.set_cutom_attributes()
+ time_2 = time.time()
+
+ # This must happen before all filtering!!!
+ entities_factory.prepare_avalon_entities(ft_project_name)
+ time_3 = time.time()
+
+ entities_factory.filter_by_ignore_sync()
+ time_4 = time.time()
+
+ entities_factory.duplicity_regex_check()
+ time_5 = time.time()
+
+ entities_factory.prepare_ftrack_ent_data()
+ time_6 = time.time()
+
+ entities_factory.synchronize()
+ time_7 = time.time()
+
+ self.log.debug(
+ "*** Synchronization finished ***"
+ )
+ self.log.debug(
+ "preparation <{}>".format(time_1 - time_start)
+ )
+ self.log.debug(
+ "set_cutom_attributes <{}>".format(time_2 - time_1)
+ )
+ self.log.debug(
+ "prepare_avalon_entities <{}>".format(time_3 - time_2)
+ )
+ self.log.debug(
+ "filter_by_ignore_sync <{}>".format(time_4 - time_3)
+ )
+ self.log.debug(
+ "duplicity_regex_check <{}>".format(time_5 - time_4)
+ )
+ self.log.debug(
+ "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
+ )
+ self.log.debug(
+ "synchronize <{}>".format(time_7 - time_6)
+ )
+ self.log.debug(
+ "* Total time: {}".format(time_7 - time_start)
+ )
+
+ report = entities_factory.report()
+ if report and report.get("items"):
+ default_title = "Synchronization report ({}):".format(
+ ft_project_name
+ )
+ self.show_interface(
+ items=report["items"],
+ title=report.get("title", default_title),
+ event=event
+ )
+ return {
+ "success": True,
+ "message": "Synchronization Finished"
+ }
+
+ except Exception:
+ self.log.error(
+ "Synchronization failed due to code error", exc_info=True
+ )
+ msg = "An error occurred during synchronization"
+ title = "Synchronization report ({}):".format(ft_project_name)
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+ items.append({
+ "type": "label",
+ "value": "## Traceback of the error"
+ })
+ items.append({
+ "type": "label",
+ "value": "{}
".format(
+ str(traceback.format_exc()).replace(
+ "\n", "
").replace(
+ " ", " "
+ )
+ )
+ })
+
+ report = {"items": []}
+ try:
+ report = entities_factory.report()
+ except Exception:
+ pass
+
+ _items = report.get("items", [])
+ if _items:
+ items.append(entities_factory.report_splitter)
+ items.extend(_items)
+
+ self.show_interface(items, title, event)
+
+ return {"success": True, "message": msg}
+
+ finally:
+ try:
+ entities_factory.dbcon.uninstall()
+ except Exception:
+ pass
+
+ try:
+ entities_factory.session.close()
+ except Exception:
+ pass
+
+
+def register(session, plugins_presets={}):
+    '''Register plugin. Called when used as a plugin.'''
+
+ SyncToAvalonLocal(session, plugins_presets).register()
diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py
deleted file mode 100644
index 61050f9883..0000000000
--- a/pype/ftrack/actions/action_sync_to_avalon_local.py
+++ /dev/null
@@ -1,266 +0,0 @@
-import os
-import sys
-import time
-import argparse
-import logging
-import json
-import collections
-
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction
-from pype.ftrack.lib import avalon_sync as ftracklib
-from pype.vendor.ftrack_api import session as fa_session
-
-
-class SyncToAvalon(BaseAction):
- '''
- Synchronizing data action - from Ftrack to Avalon DB
-
- Stores all information about entity.
- - Name(string) - Most important information = identifier of entity
- - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
- - Silo(string) - Last parent except project
- - Data(dictionary):
- - VisualParent(ObjectId) - Avalon Id of parent asset
- - Parents(array of string) - All parent names except project
- - Tasks(array of string) - Tasks on asset
- - FtrackId(string)
- - entityType(string) - entity's type on Ftrack
- * All Custom attributes in group 'Avalon' which name don't start with 'avalon_'
-
- * These information are stored also for all parents and children entities.
-
- Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
- - action IS NOT creating this Custom attribute if doesn't exist
- - run 'Create Custom Attributes' action or do it manually (Not recommended)
-
- If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
- - name, parents and silo are checked -> shows error if are not exact the same
- - after sync it is not allowed to change names or move entities
-
- If ID in 'avalon_mongo_id' is empty string or is not found in DB:
- - tries to find entity by name
- - found:
- - raise error if ftrackId/visual parent/parents are not same
- - not found:
- - Creates asset/project
-
- '''
-
- #: Action identifier.
- identifier = 'sync.to.avalon.local'
- #: Action label.
- label = "Pype Admin"
- variant = '- Sync To Avalon (Local)'
- #: Action description.
- description = 'Send data from Ftrack to Avalon'
- #: Action icon.
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
- )
- #: roles that are allowed to register this action
- role_list = ['Pypeclub']
- #: Action priority
- priority = 200
-
- project_query = (
- "select full_name, name, custom_attributes"
- ", project_schema._task_type_schema.types.name"
- " from Project where full_name is \"{}\""
- )
-
- entities_query = (
- "select id, name, parent_id, link, custom_attributes"
- " from TypedContext where project.full_name is \"{}\""
- )
-
- # Entity type names(lowered) that won't be synchronized with their children
- ignore_entity_types = ["task", "milestone"]
-
- def __init__(self, session, plugins_presets):
- super(SyncToAvalon, self).__init__(session)
- # reload utils on initialize (in case of server restart)
-
- def discover(self, session, entities, event):
- ''' Validation '''
- for entity in entities:
- if entity.entity_type.lower() not in ['task', 'assetversion']:
- return True
-
- return False
-
- def launch(self, session, entities, event):
- time_start = time.time()
- message = ""
-
- # JOB SETTINGS
- userId = event['source']['user']['id']
- user = session.query('User where id is ' + userId).one()
-
- job = session.create('Job', {
- 'user': user,
- 'status': 'running',
- 'data': json.dumps({
- 'description': 'Sync Ftrack to Avalon.'
- })
- })
- session.commit()
- try:
- self.log.debug("Preparing entities for synchronization")
-
- if entities[0].entity_type.lower() == "project":
- ft_project_name = entities[0]["full_name"]
- else:
- ft_project_name = entities[0]["project"]["full_name"]
-
- project_entities = session.query(
- self.entities_query.format(ft_project_name)
- ).all()
-
- ft_project = session.query(
- self.project_query.format(ft_project_name)
- ).one()
-
- entities_by_id = {}
- entities_by_parent = collections.defaultdict(list)
-
- entities_by_id[ft_project["id"]] = ft_project
- for ent in project_entities:
- entities_by_id[ent["id"]] = ent
- entities_by_parent[ent["parent_id"]].append(ent)
-
- importable = []
- for ent_info in event["data"]["selection"]:
- ent = entities_by_id[ent_info["entityId"]]
- for link_ent_info in ent["link"]:
- link_ent = entities_by_id[link_ent_info["id"]]
- if (
- ent.entity_type.lower() in self.ignore_entity_types or
- link_ent in importable
- ):
- continue
-
- importable.append(link_ent)
-
- def add_children(parent_id):
- ents = entities_by_parent[parent_id]
- for ent in ents:
- if ent.entity_type.lower() in self.ignore_entity_types:
- continue
-
- if ent not in importable:
- importable.append(ent)
-
- add_children(ent["id"])
-
- # add children of selection to importable
- for ent_info in event["data"]["selection"]:
- add_children(ent_info["entityId"])
-
- # Check names: REGEX in schema/duplicates - raise error if found
- all_names = []
- duplicates = []
-
- for entity in importable:
- ftracklib.avalon_check_name(entity)
- if entity.entity_type.lower() == "project":
- continue
-
- if entity['name'] in all_names:
- duplicates.append("'{}'".format(entity['name']))
- else:
- all_names.append(entity['name'])
-
- if len(duplicates) > 0:
- # TODO Show information to user and return False
- raise ValueError(
- "Entity name duplication: {}".format(", ".join(duplicates))
- )
-
- # ----- PROJECT ------
- avalon_project = ftracklib.get_avalon_project(ft_project)
- custom_attributes = ftracklib.get_avalon_attr(session)
-
- # Import all entities to Avalon DB
- for entity in importable:
- result = ftracklib.import_to_avalon(
- session=session,
- entity=entity,
- ft_project=ft_project,
- av_project=avalon_project,
- custom_attributes=custom_attributes
- )
- # TODO better error handling
- # maybe split into critical, warnings and messages?
- if 'errors' in result and len(result['errors']) > 0:
- job['status'] = 'failed'
- session.commit()
-
- ftracklib.show_errors(self, event, result['errors'])
-
- return {
- 'success': False,
- 'message': "Sync to avalon FAILED"
- }
-
- if avalon_project is None:
- if 'project' in result:
- avalon_project = result['project']
-
- job['status'] = 'done'
-
- except ValueError as ve:
- # TODO remove this part!!!!
- job['status'] = 'failed'
- message = str(ve)
- self.log.error(
- 'Error during syncToAvalon: {}'.format(message),
- exc_info=True
- )
-
- except Exception as e:
- job['status'] = 'failed'
- exc_type, exc_obj, exc_tb = sys.exc_info()
- fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
- log_message = "{}/{}/Line: {}".format(
- exc_type, fname, exc_tb.tb_lineno
- )
- self.log.error(
- 'Error during syncToAvalon: {}'.format(log_message),
- exc_info=True
- )
- # TODO add traceback to message and show to user
- message = (
- 'Unexpected Error'
- ' - Please check Log for more information'
- )
- finally:
- if job['status'] in ['queued', 'running']:
- job['status'] = 'failed'
- session.commit()
-
- time_end = time.time()
- self.log.debug("Synchronization took \"{}\"".format(
- str(time_end - time_start)
- ))
-
- if job["status"] != "failed":
- self.log.debug("Triggering Sync hierarchical attributes")
- self.trigger_action("sync.hierarchical.attrs.local", event)
-
- if len(message) > 0:
- message = "Unable to sync: {}".format(message)
- return {
- 'success': False,
- 'message': message
- }
-
- return {
- 'success': True,
- 'message': "Synchronization was successfull"
- }
-
-
-def register(session, plugins_presets={}):
- '''Register plugin. Called when used as an plugin.'''
- SyncToAvalon(session, plugins_presets).register()
diff --git a/pype/ftrack/actions/action_thumbnail_to_childern.py b/pype/ftrack/actions/action_thumbnail_to_childern.py
index 7d189cf652..4a6a85a6e6 100644
--- a/pype/ftrack/actions/action_thumbnail_to_childern.py
+++ b/pype/ftrack/actions/action_thumbnail_to_childern.py
@@ -43,7 +43,7 @@ class ThumbToChildren(BaseAction):
'description': 'Push thumbnails to Childrens'
})
})
-
+ session.commit()
try:
for entity in entities:
thumbid = entity['thumbnail_id']
@@ -53,10 +53,11 @@ class ThumbToChildren(BaseAction):
# inform the user that the job is done
job['status'] = 'done'
- except Exception:
+ except Exception as exc:
+ session.rollback()
# fail the job if something goes wrong
job['status'] = 'failed'
- raise
+ raise exc
finally:
session.commit()
diff --git a/pype/ftrack/actions/action_thumbnail_to_parent.py b/pype/ftrack/actions/action_thumbnail_to_parent.py
index efafca4a96..596d49836f 100644
--- a/pype/ftrack/actions/action_thumbnail_to_parent.py
+++ b/pype/ftrack/actions/action_thumbnail_to_parent.py
@@ -40,9 +40,9 @@ class ThumbToParent(BaseAction):
'status': 'running',
'data': json.dumps({
'description': 'Push thumbnails to parents'
- })
})
-
+ })
+ session.commit()
try:
for entity in entities:
parent = None
@@ -74,10 +74,11 @@ class ThumbToParent(BaseAction):
# inform the user that the job is done
job['status'] = status or 'done'
- except Exception as e:
+ except Exception as exc:
+ session.rollback()
# fail the job if something goes wrong
job['status'] = 'failed'
- raise e
+ raise exc
finally:
session.commit()
diff --git a/pype/ftrack/events/action_sync_hier_attrs.py b/pype/ftrack/events/action_sync_hier_attrs.py
deleted file mode 100644
index 23ac319261..0000000000
--- a/pype/ftrack/events/action_sync_hier_attrs.py
+++ /dev/null
@@ -1,383 +0,0 @@
-import os
-import sys
-import json
-import argparse
-import logging
-import collections
-
-from pypeapp import config
-from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, lib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
-from bson.objectid import ObjectId
-
-
-class SyncHierarchicalAttrs(BaseAction):
-
- db_con = DbConnector()
- ca_mongoid = lib.get_ca_mongoid()
-
- #: Action identifier.
- identifier = 'sync.hierarchical.attrs'
- #: Action label.
- label = "Pype Admin"
- variant = '- Sync Hier Attrs (Server)'
- #: Action description.
- description = 'Synchronize hierarchical attributes'
- #: Icon
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
- os.environ.get(
- 'PYPE_STATICS_SERVER',
- 'http://localhost:{}'.format(
- config.get_presets().get('services', {}).get(
- 'statics_server', {}
- ).get('default_port', 8021)
- )
- )
- )
-
- def register(self):
- self.session.event_hub.subscribe(
- 'topic=ftrack.action.discover',
- self._discover
- )
-
- self.session.event_hub.subscribe(
- 'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
- self.identifier
- ),
- self._launch
- )
-
- def discover(self, session, entities, event):
- ''' Validation '''
- role_check = False
- discover = False
- role_list = ['Pypeclub', 'Administrator', 'Project Manager']
- user = session.query(
- 'User where id is "{}"'.format(event['source']['user']['id'])
- ).one()
-
- for role in user['user_security_roles']:
- if role['security_role']['name'] in role_list:
- role_check = True
- break
-
- if role_check is True:
- for entity in entities:
- context_type = entity.get('context_type', '').lower()
- if (
- context_type in ('show', 'task') and
- entity.entity_type.lower() != 'task'
- ):
- discover = True
- break
-
- return discover
-
- def launch(self, session, entities, event):
- self.interface_messages = {}
-
- user = session.query(
- 'User where id is "{}"'.format(event['source']['user']['id'])
- ).one()
-
- job = session.create('Job', {
- 'user': user,
- 'status': 'running',
- 'data': json.dumps({
- 'description': 'Sync Hierachical attributes'
- })
- })
- session.commit()
- self.log.debug('Job with id "{}" created'.format(job['id']))
-
- process_session = ftrack_api.Session(
- server_url=session.server_url,
- api_key=session.api_key,
- api_user=session.api_user,
- auto_connect_event_hub=True
- )
- try:
- # Collect hierarchical attrs
- self.log.debug('Collecting Hierarchical custom attributes started')
- custom_attributes = {}
- all_avalon_attr = process_session.query(
- 'CustomAttributeGroup where name is "avalon"'
- ).one()
-
- error_key = (
- 'Hierarchical attributes with set "default" value (not allowed)'
- )
-
- for cust_attr in all_avalon_attr['custom_attribute_configurations']:
- if 'avalon_' in cust_attr['key']:
- continue
-
- if not cust_attr['is_hierarchical']:
- continue
-
- if cust_attr['default']:
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
- self.interface_messages[error_key].append(
- cust_attr['label']
- )
-
- self.log.warning((
- 'Custom attribute "{}" has set default value.'
- ' This attribute can\'t be synchronized'
- ).format(cust_attr['label']))
- continue
-
- custom_attributes[cust_attr['key']] = cust_attr
-
- self.log.debug(
- 'Collecting Hierarchical custom attributes has finished'
- )
-
- if not custom_attributes:
- msg = 'No hierarchical attributes to sync.'
- self.log.debug(msg)
- return {
- 'success': True,
- 'message': msg
- }
-
- entity = entities[0]
- if entity.entity_type.lower() == 'project':
- project_name = entity['full_name']
- else:
- project_name = entity['project']['full_name']
-
- self.db_con.install()
- self.db_con.Session['AVALON_PROJECT'] = project_name
-
- _entities = self._get_entities(event, process_session)
-
- for entity in _entities:
- self.log.debug(30*'-')
- self.log.debug(
- 'Processing entity "{}"'.format(entity.get('name', entity))
- )
-
- ent_name = entity.get('name', entity)
- if entity.entity_type.lower() == 'project':
- ent_name = entity['full_name']
-
- for key in custom_attributes:
- self.log.debug(30*'*')
- self.log.debug(
- 'Processing Custom attribute key "{}"'.format(key)
- )
- # check if entity has that attribute
- if key not in entity['custom_attributes']:
- error_key = 'Missing key on entities'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- self.interface_messages[error_key].append(
- '- key: "{}" - entity: "{}"'.format(key, ent_name)
- )
-
- self.log.error((
- '- key "{}" not found on "{}"'
- ).format(key, entity.get('name', entity)))
- continue
-
- value = self.get_hierarchical_value(key, entity)
- if value is None:
- error_key = (
- 'Missing value for key on entity'
- ' and its parents (synchronization was skipped)'
- )
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- self.interface_messages[error_key].append(
- '- key: "{}" - entity: "{}"'.format(key, ent_name)
- )
-
- self.log.warning((
- '- key "{}" not set on "{}" or its parents'
- ).format(key, ent_name))
- continue
-
- self.update_hierarchical_attribute(entity, key, value)
-
- job['status'] = 'done'
- session.commit()
-
- except Exception:
- self.log.error(
- 'Action "{}" failed'.format(self.label),
- exc_info=True
- )
-
- finally:
- self.db_con.uninstall()
-
- if job['status'] in ('queued', 'running'):
- job['status'] = 'failed'
- session.commit()
-
- if self.interface_messages:
- self.show_interface_from_dict(
- messages=self.interface_messages,
- title="something went wrong",
- event=event
- )
-
- return True
-
- def get_hierarchical_value(self, key, entity):
- value = entity['custom_attributes'][key]
- if (
- value is not None or
- entity.entity_type.lower() == 'project'
- ):
- return value
-
- return self.get_hierarchical_value(key, entity['parent'])
-
- def update_hierarchical_attribute(self, entity, key, value):
- if (
- entity['context_type'].lower() not in ('show', 'task') or
- entity.entity_type.lower() == 'task'
- ):
- return
-
- ent_name = entity.get('name', entity)
- if entity.entity_type.lower() == 'project':
- ent_name = entity['full_name']
-
- hierarchy = '/'.join(
- [a['name'] for a in entity.get('ancestors', [])]
- )
- if hierarchy:
- hierarchy = '/'.join(
- [entity['project']['full_name'], hierarchy, entity['name']]
- )
- elif entity.entity_type.lower() == 'project':
- hierarchy = entity['full_name']
- else:
- hierarchy = '/'.join(
- [entity['project']['full_name'], entity['name']]
- )
-
- self.log.debug('- updating entity "{}"'.format(hierarchy))
-
- # collect entity's custom attributes
- custom_attributes = entity.get('custom_attributes')
- if not custom_attributes:
- return
-
- mongoid = custom_attributes.get(self.ca_mongoid)
- if not mongoid:
- error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" is not synchronized to avalon. Skipping'.format(
- ent_name
- )
- )
- return
-
- try:
- mongoid = ObjectId(mongoid)
- except Exception:
- error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" has stored invalid MongoID. Skipping'.format(
- ent_name
- )
- )
- return
- # Find entity in Mongo DB
- mongo_entity = self.db_con.find_one({'_id': mongoid})
- if not mongo_entity:
- error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
- if error_key not in self.interface_messages:
- self.interface_messages[error_key] = []
-
- if ent_name not in self.interface_messages[error_key]:
- self.interface_messages[error_key].append(ent_name)
-
- self.log.warning(
- '-- entity "{}" was not found in DB by id "{}". Skipping'.format(
- ent_name, str(mongoid)
- )
- )
- return
-
- # Change value if entity has set it's own
- entity_value = custom_attributes[key]
- if entity_value is not None:
- value = entity_value
-
- data = mongo_entity.get('data') or {}
-
- data[key] = value
- self.db_con.update_many(
- {'_id': mongoid},
- {'$set': {'data': data}}
- )
-
- for child in entity.get('children', []):
- self.update_hierarchical_attribute(child, key, value)
-
-
-def register(session, plugins_presets):
- '''Register plugin. Called when used as an plugin.'''
-
- SyncHierarchicalAttrs(session, plugins_presets).register()
-
-
-def main(arguments=None):
- '''Set up logging and register action.'''
- if arguments is None:
- arguments = []
-
- parser = argparse.ArgumentParser()
- # Allow setting of logging level from arguments.
- loggingLevels = {}
- for level in (
- logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
- logging.ERROR, logging.CRITICAL
- ):
- loggingLevels[logging.getLevelName(level).lower()] = level
-
- parser.add_argument(
- '-v', '--verbosity',
- help='Set the logging output verbosity.',
- choices=loggingLevels.keys(),
- default='info'
- )
- namespace = parser.parse_args(arguments)
-
- # Set up basic logging
- logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
- session = ftrack_api.Session()
- register(session)
-
- # Wait for events
- logging.info(
- 'Registered actions and listening for events. Use Ctrl-C to abort.'
- )
- session.event_hub.wait()
-
-
-if __name__ == '__main__':
- raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py
index 7b5f94f216..0587aae31e 100644
--- a/pype/ftrack/events/action_sync_to_avalon.py
+++ b/pype/ftrack/events/action_sync_to_avalon.py
@@ -1,338 +1,2352 @@
import os
-import sys
-import argparse
-import logging
-import json
import collections
+import re
+import queue
import time
+import toml
+import traceback
-from pypeapp import config
+from bson.objectid import ObjectId
+from bson.errors import InvalidId
+from pymongo import UpdateOne
+
+import avalon
+from pype.ftrack import BaseAction
+from pype.ftrack.lib.io_nonsingleton import DbConnector
from pype.vendor import ftrack_api
-from pype.ftrack import BaseAction, lib
from pype.vendor.ftrack_api import session as fa_session
+from pypeapp import Anatomy, config
-class SyncToAvalon(BaseAction):
- '''
- Synchronizing data action - from Ftrack to Avalon DB
-
- Stores all information about entity.
- - Name(string) - Most important information = identifier of entity
- - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
- - Silo(string) - Last parent except project
- - Data(dictionary):
- - VisualParent(ObjectId) - Avalon Id of parent asset
- - Parents(array of string) - All parent names except project
- - Tasks(array of string) - Tasks on asset
- - FtrackId(string)
- - entityType(string) - entity's type on Ftrack
- * All Custom attributes in group 'Avalon' which name don't start with 'avalon_'
-
- * These information are stored also for all parents and children entities.
-
- Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
- - action IS NOT creating this Custom attribute if doesn't exist
- - run 'Create Custom Attributes' action or do it manually (Not recommended)
-
- If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
- - name, parents and silo are checked -> shows error if are not exact the same
- - after sync it is not allowed to change names or move entities
-
- If ID in 'avalon_mongo_id' is empty string or is not found in DB:
- - tries to find entity by name
- - found:
- - raise error if ftrackId/visual parent/parents are not same
- - not found:
- - Creates asset/project
-
- '''
-
- #: Action identifier.
- identifier = 'sync.to.avalon'
- #: Action label.
- label = "Pype Admin"
- variant = "- Sync To Avalon (Server)"
- #: Action description.
- description = 'Send data from Ftrack to Avalon'
- #: Action icon.
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
- os.environ.get(
- 'PYPE_STATICS_SERVER',
- 'http://localhost:{}'.format(
- config.get_presets().get('services', {}).get(
- 'statics_server', {}
- ).get('default_port', 8021)
- )
- )
- )
+class SyncEntitiesFactory:
+ dbcon = DbConnector()
project_query = (
"select full_name, name, custom_attributes"
", project_schema._task_type_schema.types.name"
" from Project where full_name is \"{}\""
)
-
entities_query = (
- "select id, name, parent_id, link, custom_attributes"
- " from TypedContext where project.full_name is \"{}\""
+ "select id, name, parent_id, link"
+ " from TypedContext where project_id is \"{}\""
)
+ ignore_custom_attr_key = "avalon_ignore_sync"
+ id_cust_attr = "avalon_mongo_id"
- # Entity type names(lowered) that won't be synchronized with their children
- ignore_entity_types = ["task", "milestone"]
+ entity_schemas = {
+ "project": "avalon-core:project-2.0",
+ "asset": "avalon-core:asset-3.0",
+ "config": "avalon-core:config-1.0"
+ }
- def register(self):
- self.session.event_hub.subscribe(
- 'topic=ftrack.action.discover',
- self._discover
+ report_splitter = {"type": "label", "value": "---"}
+
+ def __init__(self, log_obj, _session, project_full_name):
+ self.log = log_obj
+ self.session = ftrack_api.Session(
+ server_url=_session.server_url,
+ api_key=_session.api_key,
+ api_user=_session.api_user,
+ auto_connect_event_hub=True
)
- self.session.event_hub.subscribe(
- 'topic=ftrack.action.launch and data.actionIdentifier={0}'.format(
- self.identifier
- ),
- self._launch
- )
+ self.cancel_auto_sync = False
- def discover(self, session, entities, event):
- ''' Validation '''
- roleCheck = False
- discover = False
- roleList = ['Pypeclub', 'Administrator', 'Project Manager']
- userId = event['source']['user']['id']
- user = session.query('User where id is ' + userId).one()
+ self.schema_patterns = {}
+ self.duplicates = {}
+ self.failed_regex = {}
+ self.tasks_failed_regex = collections.defaultdict(list)
+ self.report_items = {
+ "info": collections.defaultdict(list),
+ "warning": collections.defaultdict(list),
+ "error": collections.defaultdict(list)
+ }
- for role in user['user_security_roles']:
- if role['security_role']['name'] in roleList:
- roleCheck = True
- break
- if roleCheck is True:
- for entity in entities:
- if entity.entity_type.lower() not in ['task', 'assetversion']:
- discover = True
- break
+ self.create_list = []
+ self.recreated_ftrack_ents = {}
+ self.updates = collections.defaultdict(dict)
- return discover
+ self._avalon_ents_by_id = None
+ self._avalon_ents_by_ftrack_id = None
+ self._avalon_ents_by_name = None
+ self._avalon_ents_by_parent_id = None
- def launch(self, session, entities, event):
- time_start = time.time()
- message = ""
+ self._avalon_archived_ents = None
+ self._avalon_archived_by_id = None
+ self._avalon_archived_by_parent_id = None
+ self._avalon_archived_by_name = None
- # JOB SETTINGS
- userId = event['source']['user']['id']
- user = session.query('User where id is ' + userId).one()
+ self._subsets_by_parent_id = None
+ self._changeability_by_mongo_id = None
- job = session.create('Job', {
- 'user': user,
- 'status': 'running',
- 'data': json.dumps({
- 'description': 'Sync Ftrack to Avalon.'
+ self.all_filtered_entities = {}
+ # self.all_filtered_ids = []
+ self.filtered_ids = []
+ self.not_selected_ids = []
+
+ self._ent_pats_by_ftrack_id = {}
+
+ # Get Ftrack project
+ ft_project = self.session.query(
+ self.project_query.format(project_full_name)
+ ).one()
+ ft_project_id = ft_project["id"]
+
+ # Skip if project is ignored
+ if ft_project["custom_attributes"].get(
+ self.ignore_custom_attr_key
+ ) is True:
+ msg = (
+ "Project \"{}\" has set `Ignore Sync` custom attribute to True"
+ ).format(project_full_name)
+ self.log.warning(msg)
+ return {"success": False, "message": msg}
+
+ # Check if `avalon_mongo_id` custom attribute exist or is accessible
+ if self.id_cust_attr not in ft_project["custom_attributes"]:
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# Can't access Custom attribute <{}>".format(
+ self.id_cust_attr
+ )
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "- Check if user \"{}\" has permissions"
+ " to access the Custom attribute
"
+                ).format(_session.api_user)
+ })
+ items.append({
+ "type": "label",
+ "value": "- Check if the Custom attribute exist
"
})
- })
- session.commit()
- try:
- self.log.debug("Preparing entities for synchronization")
-
- if entities[0].entity_type.lower() == "project":
- ft_project_name = entities[0]["full_name"]
- else:
- ft_project_name = entities[0]["project"]["full_name"]
-
- project_entities = session.query(
- self.entities_query.format(ft_project_name)
- ).all()
-
- ft_project = session.query(
- self.project_query.format(ft_project_name)
- ).one()
-
- entities_by_id = {}
- entities_by_parent = collections.defaultdict(list)
-
- entities_by_id[ft_project["id"]] = ft_project
- for ent in project_entities:
- entities_by_id[ent["id"]] = ent
- entities_by_parent[ent["parent_id"]].append(ent)
-
- importable = []
- for ent_info in event["data"]["selection"]:
- ent = entities_by_id[ent_info["entityId"]]
- for link_ent_info in ent["link"]:
- link_ent = entities_by_id[link_ent_info["id"]]
- if (
- ent.entity_type.lower() in self.ignore_entity_types or
- link_ent in importable
- ):
- continue
-
- importable.append(link_ent)
-
- def add_children(parent_id):
- ents = entities_by_parent[parent_id]
- for ent in ents:
- if ent.entity_type.lower() in self.ignore_entity_types:
- continue
-
- if ent not in importable:
- importable.append(ent)
-
- add_children(ent["id"])
-
- # add children of selection to importable
- for ent_info in event["data"]["selection"]:
- add_children(ent_info["entityId"])
-
- # Check names: REGEX in schema/duplicates - raise error if found
- all_names = []
- duplicates = []
-
- for entity in importable:
- lib.avalon_check_name(entity)
- if entity.entity_type.lower() == "project":
- continue
-
- if entity['name'] in all_names:
- duplicates.append("'{}'".format(entity['name']))
- else:
- all_names.append(entity['name'])
-
- if len(duplicates) > 0:
- # TODO Show information to user and return False
- raise ValueError(
- "Entity name duplication: {}".format(", ".join(duplicates))
- )
-
- # ----- PROJECT ------
- avalon_project = lib.get_avalon_project(ft_project)
- custom_attributes = lib.get_avalon_attr(session)
-
- # Import all entities to Avalon DB
- for entity in importable:
- result = lib.import_to_avalon(
- session=session,
- entity=entity,
- ft_project=ft_project,
- av_project=avalon_project,
- custom_attributes=custom_attributes
- )
- # TODO better error handling
- # maybe split into critical, warnings and messages?
- if 'errors' in result and len(result['errors']) > 0:
- job['status'] = 'failed'
- session.commit()
-
- lib.show_errors(self, event, result['errors'])
-
- return {
- 'success': False,
- 'message': "Sync to avalon FAILED"
- }
-
- if avalon_project is None:
- if 'project' in result:
- avalon_project = result['project']
-
- job['status'] = 'done'
- session.commit()
-
- except ValueError as ve:
- # TODO remove this part!!!!
- job['status'] = 'failed'
- session.commit()
- message = str(ve)
- self.log.error(
- 'Error during syncToAvalon: {}'.format(message),
- exc_info=True
- )
-
- except Exception as e:
- job['status'] = 'failed'
- session.commit()
- exc_type, exc_obj, exc_tb = sys.exc_info()
- fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
- log_message = "{}/{}/Line: {}".format(
- exc_type, fname, exc_tb.tb_lineno
- )
- self.log.error(
- 'Error during syncToAvalon: {}'.format(log_message),
- exc_info=True
- )
- # TODO add traceback to message and show to user
- message = (
- 'Unexpected Error'
- ' - Please check Log for more information'
- )
-
- finally:
- if job['status'] in ['queued', 'running']:
- job['status'] = 'failed'
-
- session.commit()
-
- time_end = time.time()
- self.log.debug("Synchronization took \"{}\"".format(
- str(time_end - time_start)
- ))
-
- if job["status"] != "failed":
- self.log.debug("Triggering Sync hierarchical attributes")
- self.trigger_action("sync.hierarchical.attrs", event)
-
- if len(message) > 0:
- message = "Unable to sync: {}".format(message)
return {
- 'success': False,
- 'message': message
+ "items": items,
+ "title": "Synchronization failed",
+ "success": False,
+ "message": "Synchronization failed"
}
+ # Find all entities in project
+ all_project_entities = self.session.query(
+ self.entities_query.format(ft_project_id)
+ ).all()
+
+ # Store entities by `id` and `parent_id`
+ entities_dict = collections.defaultdict(lambda: {
+ "children": list(),
+ "parent_id": None,
+ "entity": None,
+ "entity_type": None,
+ "name": None,
+ "custom_attributes": {},
+ "hier_attrs": {},
+ "avalon_attrs": {},
+ "tasks": []
+ })
+
+ for entity in all_project_entities:
+ parent_id = entity["parent_id"]
+ entity_type = entity.entity_type
+ entity_type_low = entity_type.lower()
+ if entity_type_low == "task":
+ entities_dict[parent_id]["tasks"].append(entity["name"])
+ continue
+
+ entity_id = entity["id"]
+ entities_dict[entity_id].update({
+ "entity": entity,
+ "parent_id": parent_id,
+ "entity_type": entity_type_low,
+ "entity_type_orig": entity_type,
+ "name": entity["name"]
+ })
+ entities_dict[parent_id]["children"].append(entity_id)
+
+ entities_dict[ft_project_id]["entity"] = ft_project
+ entities_dict[ft_project_id]["entity_type"] = (
+ ft_project.entity_type.lower()
+ )
+ entities_dict[ft_project_id]["entity_type_orig"] = (
+ ft_project.entity_type
+ )
+ entities_dict[ft_project_id]["name"] = ft_project["full_name"]
+
+ self.ft_project_id = ft_project_id
+ self.entities_dict = entities_dict
+
+ @property
+ def avalon_ents_by_id(self):
+ if self._avalon_ents_by_id is None:
+ self._avalon_ents_by_id = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_id[str(entity["_id"])] = entity
+
+ return self._avalon_ents_by_id
+
+ @property
+ def avalon_ents_by_ftrack_id(self):
+ if self._avalon_ents_by_ftrack_id is None:
+ self._avalon_ents_by_ftrack_id = {}
+ for entity in self.avalon_entities:
+ key = entity.get("data", {}).get("ftrackId")
+ if not key:
+ continue
+ self._avalon_ents_by_ftrack_id[key] = str(entity["_id"])
+
+ return self._avalon_ents_by_ftrack_id
+
+ @property
+ def avalon_ents_by_name(self):
+ if self._avalon_ents_by_name is None:
+ self._avalon_ents_by_name = {}
+ for entity in self.avalon_entities:
+ self._avalon_ents_by_name[entity["name"]] = str(entity["_id"])
+
+ return self._avalon_ents_by_name
+
+ @property
+ def avalon_ents_by_parent_id(self):
+ if self._avalon_ents_by_parent_id is None:
+ self._avalon_ents_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_entities:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_ents_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_ents_by_parent_id
+
+ @property
+ def avalon_archived_ents(self):
+ if self._avalon_archived_ents is None:
+ self._avalon_archived_ents = [
+ ent for ent in self.dbcon.find({"type": "archived_asset"})
+ ]
+ return self._avalon_archived_ents
+
+ @property
+ def avalon_archived_by_name(self):
+ if self._avalon_archived_by_name is None:
+ self._avalon_archived_by_name = collections.defaultdict(list)
+ for ent in self.avalon_archived_ents:
+ self._avalon_archived_by_name[ent["name"]].append(ent)
+ return self._avalon_archived_by_name
+
+ @property
+ def avalon_archived_by_id(self):
+ if self._avalon_archived_by_id is None:
+ self._avalon_archived_by_id = {
+ str(ent["_id"]): ent for ent in self.avalon_archived_ents
+ }
+ return self._avalon_archived_by_id
+
+ @property
+ def avalon_archived_by_parent_id(self):
+ if self._avalon_archived_by_parent_id is None:
+ self._avalon_archived_by_parent_id = collections.defaultdict(list)
+ for entity in self.avalon_archived_ents:
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is not None:
+ parent_id = str(parent_id)
+ self._avalon_archived_by_parent_id[parent_id].append(entity)
+
+ return self._avalon_archived_by_parent_id
+
+ @property
+ def subsets_by_parent_id(self):
+ if self._subsets_by_parent_id is None:
+ self._subsets_by_parent_id = collections.defaultdict(list)
+ for subset in self.dbcon.find({"type": "subset"}):
+ self._subsets_by_parent_id[str(subset["parent"])].append(
+ subset
+ )
+
+ return self._subsets_by_parent_id
+
+ @property
+ def changeability_by_mongo_id(self):
+ if self._changeability_by_mongo_id is None:
+ self._changeability_by_mongo_id = collections.defaultdict(
+ lambda: True
+ )
+            # other keys in this mapping are strings, so store the project
+            # id as a string too
+            self._changeability_by_mongo_id[
+                str(self.avalon_project_id)
+            ] = False
+ self._bubble_changeability(list(self.subsets_by_parent_id.keys()))
+ return self._changeability_by_mongo_id
+
+ @property
+ def all_ftrack_names(self):
+ return [
+ ent_dict["name"] for ent_dict in self.entities_dict.values() if (
+ ent_dict.get("name")
+ )
+ ]
+
+ def duplicity_regex_check(self):
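+        """Collect duplicated names and names with prohibited symbols.
+
+        Entity and task names are validated against the avalon schema
+        name patterns and offenders are filtered out of the sync.
+        """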
+ self.log.debug("* Checking duplicities and invalid symbols")
+ # Duplicity and regex check
+ entity_ids_by_name = {}
+ duplicates = []
+ failed_regex = []
+ task_names = {}
+ for ftrack_id, entity_dict in self.entities_dict.items():
+ regex_check = True
+ name = entity_dict["name"]
+ entity_type = entity_dict["entity_type"]
+ # Tasks must be checked too
+ for task_name in entity_dict["tasks"]:
+ passed = task_names.get(task_name)
+ if passed is None:
+ passed = self.check_regex(task_name, "task")
+ task_names[task_name] = passed
+
+ if not passed:
+ self.tasks_failed_regex[task_name].append(ftrack_id)
+
+ if name in entity_ids_by_name:
+ duplicates.append(name)
+ else:
+ entity_ids_by_name[name] = []
+ regex_check = self.check_regex(name, entity_type)
+
+ entity_ids_by_name[name].append(ftrack_id)
+ if not regex_check:
+ failed_regex.append(name)
+
+ for name in failed_regex:
+ self.failed_regex[name] = entity_ids_by_name[name]
+
+ for name in duplicates:
+ self.duplicates[name] = entity_ids_by_name[name]
+
+ self.filter_by_duplicate_regex()
+
+ def check_regex(self, name, entity_type, in_schema=None):
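+        """Validate `name` against the avalon schema name pattern.
+
+        Illustrative example, assuming the default pattern
+        "^[a-zA-Z0-9_.]*$" is in effect:
+        >>> self.check_regex("sh 010", "asset")
+        False
+        """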
+ schema_name = "asset-3.0"
+ if in_schema:
+ schema_name = in_schema
+ elif entity_type == "project":
+ schema_name = "project-2.0"
+ elif entity_type == "task":
+ schema_name = "task"
+
+ name_pattern = self.schema_patterns.get(schema_name)
+ if not name_pattern:
+ default_pattern = "^[a-zA-Z0-9_.]*$"
+ schema_obj = avalon.schema._cache.get(schema_name + ".json")
+ if not schema_obj:
+ name_pattern = default_pattern
+ else:
+ name_pattern = schema_obj.get(
+ "properties", {}).get(
+ "name", {}).get(
+ "pattern", default_pattern
+ )
+ self.schema_patterns[schema_name] = name_pattern
+
+ if re.match(name_pattern, name):
+ return True
+ return False
+
+ def filter_by_duplicate_regex(self):
+ filter_queue = queue.Queue()
+        failed_regex_msg = "{} - Entity has invalid symbol(s) in name"
+ duplicate_msg = "Multiple entities have name \"{}\":"
+
+ for ids in self.failed_regex.values():
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(ent_path))
+ filter_queue.put(id)
+
+ for name, ids in self.duplicates.items():
+ self.log.warning(duplicate_msg.format(name))
+ for id in ids:
+ ent_path = self.get_ent_path(id)
+ self.log.warning(ent_path)
+ filter_queue.put(id)
+
+ filtered_ids = []
+ while not filter_queue.empty():
+ ftrack_id = filter_queue.get()
+ if ftrack_id in filtered_ids:
+ continue
+
+ entity_dict = self.entities_dict.pop(ftrack_id, {})
+ if not entity_dict:
+ continue
+
+ self.all_filtered_entities[ftrack_id] = entity_dict
+ parent_id = entity_dict.get("parent_id")
+ if parent_id and parent_id in self.entities_dict:
+ if ftrack_id in self.entities_dict[parent_id]["children"]:
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ filtered_ids.append(ftrack_id)
+ for child_id in entity_dict.get("children", []):
+ filter_queue.put(child_id)
+
+ # self.all_filtered_ids.extend(filtered_ids)
+
+ for name, ids in self.tasks_failed_regex.items():
+ for id in ids:
+ if id not in self.entities_dict:
+ continue
+ self.entities_dict[id]["tasks"].remove(name)
+ ent_path = self.get_ent_path(id)
+ self.log.warning(failed_regex_msg.format(
+ "/".join([ent_path, name])
+ ))
+
+ def filter_by_ignore_sync(self):
+        # skip filtering if the `ignore_sync` attribute does not exist
+ if self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key, "_notset_"
+ ) == "_notset_":
+ return
+
+ self.filter_queue = queue.Queue()
+ self.filter_queue.put((self.ft_project_id, False))
+ while not self.filter_queue.empty():
+ parent_id, remove = self.filter_queue.get()
+ if remove:
+ parent_dict = self.entities_dict.pop(parent_id, {})
+ self.all_filtered_entities[parent_id] = parent_dict
+ self.filtered_ids.append(parent_id)
+ else:
+ parent_dict = self.entities_dict.get(parent_id, {})
+
+ for child_id in parent_dict.get("children", []):
+                # keep original `remove` value for all children
+ _remove = (remove is True)
+ if not _remove:
+ if self.entities_dict[child_id]["avalon_attrs"].get(
+ self.ignore_custom_attr_key
+ ):
+ self.entities_dict[parent_id]["children"].remove(
+ child_id
+ )
+ _remove = True
+ self.filter_queue.put((child_id, _remove))
+
+ # self.all_filtered_ids.extend(self.filtered_ids)
+
+ def filter_by_selection(self, event):
+        # BUGGY!!! causes entities to end up in the deleted list
+        # TODO may work when filtering happens after preparations
+        # - But this part probably does not have any functional reason
+        # - Synchronization time probably wouldn't change much
+ selected_ids = []
+ for entity in event["data"]["selection"]:
+ # Skip if project is in selection
+ if entity["entityType"] == "show":
+ return
+ selected_ids.append(entity["entityId"])
+
+ sync_ids = [self.ft_project_id]
+ parents_queue = queue.Queue()
+ children_queue = queue.Queue()
+ for id in selected_ids:
+ # skip if already filtered with ignore sync custom attribute
+ if id in self.filtered_ids:
+ continue
+
+ parents_queue.put(id)
+ children_queue.put(id)
+
+ while not parents_queue.empty():
+ id = parents_queue.get()
+ while True:
+ # Stops when parent is in sync_ids
+ if id in self.filtered_ids or id in sync_ids or id is None:
+ break
+ sync_ids.append(id)
+ id = self.entities_dict[id]["parent_id"]
+
+ while not children_queue.empty():
+ parent_id = children_queue.get()
+ for child_id in self.entities_dict[parent_id]["children"]:
+ if child_id in sync_ids or child_id in self.filtered_ids:
+ continue
+ sync_ids.append(child_id)
+ children_queue.put(child_id)
+
+        # split entities into not-selected and to-be-processed
+ for key, value in self.entities_dict.items():
+ if key not in sync_ids:
+ self.not_selected_ids.append(key)
+
+ for id in self.not_selected_ids:
+ # pop from entities
+ value = self.entities_dict.pop(id)
+ # remove entity from parent's children
+ parent_id = value["parent_id"]
+ if parent_id not in sync_ids:
+ continue
+
+ self.entities_dict[parent_id]["children"].remove(id)
+
+ def set_cutom_attributes(self):
+ self.log.debug("* Preparing custom attributes")
+ # Get custom attributes and values
+ custom_attrs, hier_attrs = self.get_avalon_attr(True)
+ ent_types = self.session.query("select id, name from ObjectType").all()
+ ent_types_by_name = {
+ ent_type["name"]: ent_type["id"] for ent_type in ent_types
+ }
+
+ attrs = set()
+ # store default values per entity type
+ attrs_per_entity_type = collections.defaultdict(dict)
+ avalon_attrs = collections.defaultdict(dict)
+ # store also custom attribute configuration id for future use (create)
+ attrs_per_entity_type_ca_id = collections.defaultdict(dict)
+ avalon_attrs_ca_id = collections.defaultdict(dict)
+
+ for cust_attr in custom_attrs:
+ key = cust_attr["key"]
+ attrs.add(key)
+ ca_ent_type = cust_attr["entity_type"]
+ if key.startswith("avalon_"):
+ if ca_ent_type == "show":
+ avalon_attrs[ca_ent_type][key] = cust_attr["default"]
+ avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
+ else:
+ obj_id = cust_attr["object_type_id"]
+ avalon_attrs[obj_id][key] = cust_attr["default"]
+ avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
+ continue
+
+ if ca_ent_type == "show":
+ attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
+ attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
+ else:
+ obj_id = cust_attr["object_type_id"]
+ attrs_per_entity_type[obj_id][key] = cust_attr["default"]
+ attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]
+
+ obj_id_ent_type_map = {}
+ sync_ids = []
+ for entity_id, entity_dict in self.entities_dict.items():
+ sync_ids.append(entity_id)
+ entity_type = entity_dict["entity_type"]
+ entity_type_orig = entity_dict["entity_type_orig"]
+
+ if entity_type == "project":
+ attr_key = "show"
+ else:
+ map_key = obj_id_ent_type_map.get(entity_type_orig)
+ if not map_key:
+ # Put space between capitals
+ # (e.g. 'AssetBuild' -> 'Asset Build')
+ map_key = re.sub(
+ r"(\w)([A-Z])", r"\1 \2", entity_type_orig
+ )
+ obj_id_ent_type_map[entity_type_orig] = map_key
+
+ # Get object id of entity type
+ attr_key = ent_types_by_name.get(map_key)
+
+                # Backup solution when id is not found in pre-queried objects
+ if not attr_key:
+ query = "ObjectType where name is \"{}\"".format(map_key)
+ attr_key = self.session.query(query).one()["id"]
+ ent_types_by_name[map_key] = attr_key
+
+ prepared_attrs = attrs_per_entity_type.get(attr_key)
+ prepared_avalon_attr = avalon_attrs.get(attr_key)
+ prepared_attrs_ca_id = attrs_per_entity_type_ca_id.get(attr_key)
+ prepared_avalon_attr_ca_id = avalon_attrs_ca_id.get(attr_key)
+ if prepared_attrs:
+ self.entities_dict[entity_id]["custom_attributes"] = (
+ prepared_attrs.copy()
+ )
+ if prepared_attrs_ca_id:
+ self.entities_dict[entity_id]["custom_attributes_id"] = (
+ prepared_attrs_ca_id.copy()
+ )
+ if prepared_avalon_attr:
+ self.entities_dict[entity_id]["avalon_attrs"] = (
+ prepared_avalon_attr.copy()
+ )
+ if prepared_avalon_attr_ca_id:
+ self.entities_dict[entity_id]["avalon_attrs_id"] = (
+ prepared_avalon_attr_ca_id.copy()
+ )
+
+ # TODO query custom attributes by entity_id
+ entity_ids_joined = ", ".join([
+ "\"{}\"".format(id) for id in sync_ids
+ ])
+ attributes_joined = ", ".join([
+ "\"{}\"".format(name) for name in attrs
+ ])
+
+ cust_attr_query = (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ )
+ [values] = self.session._call([{
+ "action": "query",
+ "expression": cust_attr_query.format(
+ entity_ids_joined, attributes_joined
+ )
+ }])
+
+ for value in values["data"]:
+ entity_id = value["entity_id"]
+ key = value["configuration"]["key"]
+ store_key = "custom_attributes"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ self.entities_dict[entity_id][store_key][key] = value["value"]
+
+ # process hierarchical attributes
+ self.set_hierarchical_attribute(hier_attrs, sync_ids)
+
+ def set_hierarchical_attribute(self, hier_attrs, sync_ids):
+ # collect all hierarchical attribute keys
+ # and prepare default values to project
+ attribute_names = []
+ for attr in hier_attrs:
+ key = attr["key"]
+ attribute_names.append(key)
+
+ store_key = "hier_attrs"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+
+ self.entities_dict[self.ft_project_id][store_key][key] = (
+ attr["default"]
+ )
+
+ # Prepare dict with all hier keys and None values
+ prepare_dict = {}
+ prepare_dict_avalon = {}
+ for attr in attribute_names:
+ if attr.startswith("avalon_"):
+ prepare_dict_avalon[attr] = None
+ else:
+ prepare_dict[attr] = None
+
+ for id, entity_dict in self.entities_dict.items():
+            # Skip the project; it already has defaults stored
+ if entity_dict["entity_type"] == "project":
+ continue
+ entity_dict["hier_attrs"] = prepare_dict.copy()
+ for key, val in prepare_dict_avalon.items():
+ entity_dict["avalon_attrs"][key] = val
+
+ # Prepare values to query
+ entity_ids_joined = ", ".join([
+ "\"{}\"".format(id) for id in sync_ids
+ ])
+ attributes_joined = ", ".join([
+ "\"{}\"".format(name) for name in attribute_names
+ ])
+ [values] = self.session._call([{
+ "action": "query",
+ "expression": (
+ "select value, entity_id from CustomAttributeValue "
+ "where entity_id in ({}) and configuration.key in ({})"
+ ).format(entity_ids_joined, attributes_joined)
+ }])
+
+ avalon_hier = []
+ for value in values["data"]:
+ if value["value"] is None:
+ continue
+ entity_id = value["entity_id"]
+ key = value["configuration"]["key"]
+ store_key = "hier_attrs"
+ if key.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ avalon_hier.append(key)
+ self.entities_dict[entity_id][store_key][key] = value["value"]
+
+        # Get dictionary of non-None hierarchical values to push to children
+ top_id = self.ft_project_id
+ project_values = {}
+ for key, value in self.entities_dict[top_id]["hier_attrs"].items():
+ if value is not None:
+ project_values[key] = value
+
+ for key in avalon_hier:
+ value = self.entities_dict[top_id]["avalon_attrs"][key]
+ if value is not None:
+ project_values[key] = value
+
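+        # Push inherited hierarchical values down the tree: a child keeps
+        # its own non-None value, otherwise it inherits the parent's value.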
+ hier_down_queue = queue.Queue()
+ hier_down_queue.put((project_values, top_id))
+
+ while not hier_down_queue.empty():
+ hier_values, parent_id = hier_down_queue.get()
+ for child_id in self.entities_dict[parent_id]["children"]:
+ _hier_values = hier_values.copy()
+ for name in attribute_names:
+ store_key = "hier_attrs"
+ if name.startswith("avalon_"):
+ store_key = "avalon_attrs"
+ value = self.entities_dict[child_id][store_key][name]
+ if value is not None:
+ _hier_values[name] = value
+
+ self.entities_dict[child_id]["hier_attrs"].update(_hier_values)
+ hier_down_queue.put((_hier_values, child_id))
+
+ def remove_from_archived(self, mongo_id):
+ entity = self.avalon_archived_by_id.pop(mongo_id, None)
+ if not entity:
+ return
+
+ if self._avalon_archived_ents is not None:
+ if entity in self._avalon_archived_ents:
+ self._avalon_archived_ents.remove(entity)
+
+ if self._avalon_archived_by_name is not None:
+ name = entity["name"]
+ if name in self._avalon_archived_by_name:
+ name_ents = self._avalon_archived_by_name[name]
+ if entity in name_ents:
+ if len(name_ents) == 1:
+ self._avalon_archived_by_name.pop(name)
+ else:
+ self._avalon_archived_by_name[name].remove(entity)
+
+ # TODO use custom None instead of __NOTSET__
+ if self._avalon_archived_by_parent_id is not None:
+ parent_id = entity.get("data", {}).get(
+ "visualParent", "__NOTSET__"
+ )
+ if parent_id is not None:
+ parent_id = str(parent_id)
+
+ if parent_id in self._avalon_archived_by_parent_id:
+ parent_list = self._avalon_archived_by_parent_id[parent_id]
+                if entity in parent_list:
+ self._avalon_archived_by_parent_id[parent_id].remove(
+ entity
+ )
+
+ def prepare_ftrack_ent_data(self):
+ not_set_ids = []
+ for id, entity_dict in self.entities_dict.items():
+ entity = entity_dict["entity"]
+ if entity is None:
+ not_set_ids.append(id)
+ continue
+
+ self.entities_dict[id]["final_entity"] = {}
+ self.entities_dict[id]["final_entity"]["name"] = (
+ entity_dict["name"]
+ )
+ data = {}
+ data["ftrackId"] = entity["id"]
+ data["entityType"] = entity_dict["entity_type_orig"]
+
+            for key, val in entity_dict.get("custom_attributes", {}).items():
+ data[key] = val
+
+            for key, val in entity_dict.get("hier_attrs", {}).items():
+ data[key] = val
+
+ if id == self.ft_project_id:
+ data["code"] = entity["name"]
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "project"
+
+ proj_schema = entity["project_schema"]
+ task_types = proj_schema["_task_type_schema"]["types"]
+ self.entities_dict[id]["final_entity"]["config"] = {
+ "tasks": [{"name": tt["name"]} for tt in task_types],
+ "apps": self.get_project_apps(data)
+ }
+ continue
+
+ ent_path_items = [ent["name"] for ent in entity["link"]]
+            parents = ent_path_items[1:-1]
+            hierarchy = ""
+            if parents:
+                hierarchy = os.path.sep.join(parents)
+
+ data["parents"] = parents
+ data["hierarchy"] = hierarchy
+ data["tasks"] = self.entities_dict[id].pop("tasks", [])
+ self.entities_dict[id]["final_entity"]["data"] = data
+ self.entities_dict[id]["final_entity"]["type"] = "asset"
+
+ if not_set_ids:
+            self.log.debug((
+                "- Debug information: filtering bug, entities dict contains"
+                " empty dicts (should not affect the result) <{}>"
+            ).format("| ".join(not_set_ids)))
+ for id in not_set_ids:
+ self.entities_dict.pop(id)
+
+ def get_project_apps(self, proj_data):
+ apps = []
+ missing_toml_msg = "Missing config file for application"
+ error_msg = (
+            "Unexpected error happened during preparation of application"
+ )
+        for app in proj_data.get("applications") or []:
+ try:
+ toml_path = avalon.lib.which_app(app)
+ # TODO report
+ if not toml_path:
+                    self.log.warning('{} "{}"'.format(missing_toml_msg, app))
+ self.report_items["warning"][missing_toml_msg].append(app)
+ continue
+
+ apps.append({
+ "name": app,
+ "label": toml.load(toml_path)["label"]
+ })
+ except Exception:
+ # TODO report
+ self.report_items["warning"][error_msg].append(app)
+                self.log.warning((
+                    "Error happened during preparation of application \"{}\""
+                ).format(app), exc_info=True)
+ return apps
+
+ def get_ent_path(self, ftrack_id):
+ ent_path = self._ent_pats_by_ftrack_id.get(ftrack_id)
+ if not ent_path:
+ entity = self.entities_dict[ftrack_id]["entity"]
+ ent_path = "/".join(
+ [ent["name"] for ent in entity["link"]]
+ )
+ self._ent_pats_by_ftrack_id[ftrack_id] = ent_path
+
+ return ent_path
+
+ def prepare_avalon_entities(self, ft_project_name):
+ self.log.debug((
+ "* Preparing avalon entities "
+            "(split into create, update and delete groups)"
+ ))
+ # Avalon entities
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = ft_project_name
+ avalon_project = self.dbcon.find_one({"type": "project"})
+        # materialize the cursor so entities can be iterated repeatedly
+        avalon_entities = list(self.dbcon.find({"type": "asset"}))
+ self.avalon_project = avalon_project
+ self.avalon_entities = avalon_entities
+
+ ftrack_avalon_mapper = {}
+ avalon_ftrack_mapper = {}
+ create_ftrack_ids = []
+ update_ftrack_ids = []
+
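+        # Collect mongo ids stored in the custom attribute; ids used by
+        # more than one ftrack entity need extra matching by parents/name.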
+ same_mongo_id = []
+ all_mongo_ids = {}
+ for ftrack_id, entity_dict in self.entities_dict.items():
+ mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
+ if not mongo_id:
+ continue
+ if mongo_id in all_mongo_ids:
+ same_mongo_id.append(mongo_id)
+ else:
+ all_mongo_ids[mongo_id] = []
+ all_mongo_ids[mongo_id].append(ftrack_id)
+
+ if avalon_project:
+ mongo_id = str(avalon_project["_id"])
+ ftrack_avalon_mapper[self.ft_project_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = self.ft_project_id
+ update_ftrack_ids.append(self.ft_project_id)
+ else:
+ create_ftrack_ids.append(self.ft_project_id)
+
+        # process hierarchically so parents come before children
+ prepare_queue = queue.Queue()
+
+ for child_id in self.entities_dict[self.ft_project_id]["children"]:
+ prepare_queue.put(child_id)
+
+ while not prepare_queue.empty():
+ ftrack_id = prepare_queue.get()
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ prepare_queue.put(child_id)
+
+ entity_dict = self.entities_dict[ftrack_id]
+ ent_path = self.get_ent_path(ftrack_id)
+
+ mongo_id = entity_dict["avalon_attrs"].get(self.id_cust_attr)
+ av_ent_by_mongo_id = self.avalon_ents_by_id.get(mongo_id)
+ if av_ent_by_mongo_id:
+ av_ent_ftrack_id = av_ent_by_mongo_id.get("data", {}).get(
+ "ftrackId"
+ )
+ is_right = False
+ else_match_better = False
+ if av_ent_ftrack_id and av_ent_ftrack_id == ftrack_id:
+ is_right = True
+
+ elif mongo_id not in same_mongo_id:
+ is_right = True
+
+ else:
+ ftrack_ids_with_same_mongo = all_mongo_ids[mongo_id]
+ for _ftrack_id in ftrack_ids_with_same_mongo:
+ if _ftrack_id == av_ent_ftrack_id:
+ continue
+
+ _entity_dict = self.entities_dict[_ftrack_id]
+ _mongo_id = _entity_dict["avalon_attrs"][
+ self.id_cust_attr
+ ]
+ _av_ent_by_mongo_id = self.avalon_ents_by_id.get(
+ _mongo_id
+ )
+ _av_ent_ftrack_id = _av_ent_by_mongo_id.get(
+ "data", {}
+ ).get("ftrackId")
+ if _av_ent_ftrack_id == ftrack_id:
+ else_match_better = True
+ break
+
+ if not is_right and not else_match_better:
+ entity = entity_dict["entity"]
+ ent_path_items = [ent["name"] for ent in entity["link"]]
+                    parents = ent_path_items[1:-1]
+ av_parents = av_ent_by_mongo_id["data"]["parents"]
+ if av_parents == parents:
+ is_right = True
+ else:
+ name = entity_dict["name"]
+ av_name = av_ent_by_mongo_id["name"]
+ if name == av_name:
+ is_right = True
+
+ if is_right:
+ self.log.debug(
+ "Existing (by MongoID) <{}>".format(ent_path)
+ )
+ ftrack_avalon_mapper[ftrack_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = ftrack_id
+ update_ftrack_ids.append(ftrack_id)
+ continue
+
+ mongo_id = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+ if not mongo_id:
+ mongo_id = self.avalon_ents_by_name.get(entity_dict["name"])
+ if mongo_id:
+ self.log.debug(
+ "Existing (by matching name) <{}>".format(ent_path)
+ )
+ else:
+ self.log.debug(
+ "Existing (by FtrackID in mongo) <{}>".format(ent_path)
+ )
+
+ if mongo_id:
+ ftrack_avalon_mapper[ftrack_id] = mongo_id
+ avalon_ftrack_mapper[mongo_id] = ftrack_id
+ update_ftrack_ids.append(ftrack_id)
+ continue
+
+ self.log.debug("New <{}>".format(ent_path))
+ create_ftrack_ids.append(ftrack_id)
+
+ deleted_entities = []
+ for mongo_id in self.avalon_ents_by_id:
+ if mongo_id in avalon_ftrack_mapper:
+ continue
+ deleted_entities.append(mongo_id)
+
+ av_ent = self.avalon_ents_by_id[mongo_id]
+            av_ent_path_items = list(av_ent["data"]["parents"])
+ av_ent_path_items.append(av_ent["name"])
+ self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
+
+ self.ftrack_avalon_mapper = ftrack_avalon_mapper
+ self.avalon_ftrack_mapper = avalon_ftrack_mapper
+ self.create_ftrack_ids = create_ftrack_ids
+ self.update_ftrack_ids = update_ftrack_ids
+ self.deleted_entities = deleted_entities
+
+ self.log.debug((
+            "Ftrack -> Avalon comparison: New <{}> "
+ "| Existing <{}> | Deleted <{}>"
+ ).format(
+ len(create_ftrack_ids),
+ len(update_ftrack_ids),
+ len(deleted_entities)
+ ))
+
+ def filter_with_children(self, ftrack_id):
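+        # Remove the entity and its whole subtree from entities_dict so
+        # they are skipped by all further processing.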
+ if ftrack_id not in self.entities_dict:
+ return
+ ent_dict = self.entities_dict[ftrack_id]
+ parent_id = ent_dict["parent_id"]
+ self.entities_dict[parent_id]["children"].remove(ftrack_id)
+
+ children_queue = queue.Queue()
+ children_queue.put(ftrack_id)
+ while not children_queue.empty():
+ _ftrack_id = children_queue.get()
+ entity_dict = self.entities_dict.pop(_ftrack_id, {"children": []})
+ for child_id in entity_dict["children"]:
+ children_queue.put(child_id)
+
+ def prepare_changes(self):
+ self.log.debug("* Preparing changes for avalon/ftrack")
+ hierarchy_changing_ids = []
+ ignore_keys = collections.defaultdict(list)
+
+ update_queue = queue.Queue()
+ for ftrack_id in self.update_ftrack_ids:
+ update_queue.put(ftrack_id)
+
+ while not update_queue.empty():
+ ftrack_id = update_queue.get()
+ if ftrack_id == self.ft_project_id:
+ changes = self.prepare_project_changes()
+ if changes:
+ self.updates[self.avalon_project_id] = changes
+ continue
+
+ ftrack_ent_dict = self.entities_dict[ftrack_id]
+
+ # *** check parents
+ parent_check = False
+
+ ftrack_parent_id = ftrack_ent_dict["parent_id"]
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+ avalon_parent_id = avalon_entity["data"]["visualParent"]
+ if avalon_parent_id is not None:
+ avalon_parent_id = str(avalon_parent_id)
+
+ ftrack_parent_mongo_id = self.ftrack_avalon_mapper[
+ ftrack_parent_id
+ ]
+
+            # parent matches; a project parent is stored as None in avalon
+ if (ftrack_parent_mongo_id == avalon_parent_id) or (
+ ftrack_parent_id == self.ft_project_id and
+ avalon_parent_id is None
+ ):
+ parent_check = True
+
+ # check name
+ ftrack_name = ftrack_ent_dict["name"]
+ avalon_name = avalon_entity["name"]
+ name_check = ftrack_name == avalon_name
+
+ # IDEAL STATE: both parent and name check passed
+ if parent_check and name_check:
+ continue
+
+ # If entity is changeable then change values of parent or name
+ if self.changeability_by_mongo_id[avalon_id]:
+ # TODO logging
+ if not parent_check:
+ if ftrack_parent_mongo_id == str(self.avalon_project_id):
+ new_parent_name = self.entities_dict[
+ self.ft_project_id]["name"]
+ new_parent_id = None
+ else:
+ new_parent_name = self.avalon_ents_by_id[
+ ftrack_parent_mongo_id]["name"]
+ new_parent_id = ObjectId(ftrack_parent_mongo_id)
+
+ if avalon_parent_id == str(self.avalon_project_id):
+ old_parent_name = self.entities_dict[
+ self.ft_project_id]["name"]
+                    else:
+                        old_parent_name = self.avalon_ents_by_id.get(
+                            avalon_parent_id, {}
+                        ).get("name", "N/A")
+
+ self.updates[avalon_id]["data"] = {
+ "visualParent": new_parent_id
+ }
+ ignore_keys[ftrack_id].append("data.visualParent")
+ self.log.debug((
+ "Avalon entity \"{}\" changed parent \"{}\" -> \"{}\""
+ ).format(avalon_name, old_parent_name, new_parent_name))
+
+ if not name_check:
+ self.updates[avalon_id]["name"] = ftrack_name
+ ignore_keys[ftrack_id].append("name")
+ self.log.debug(
+ "Avalon entity \"{}\" was renamed to \"{}\"".format(
+ avalon_name, ftrack_name
+ )
+ )
+ continue
+
+ # parents and hierarchy must be recalculated
+ hierarchy_changing_ids.append(ftrack_id)
+
+ # Parent is project if avalon_parent_id is set to None
+ if avalon_parent_id is None:
+ avalon_parent_id = str(self.avalon_project_id)
+
+ if not name_check:
+ ent_path = self.get_ent_path(ftrack_id)
+ # TODO report
+ # TODO logging
+ self.entities_dict[ftrack_id]["name"] = avalon_name
+ self.entities_dict[ftrack_id]["entity"]["name"] = (
+ avalon_name
+ )
+ self.entities_dict[ftrack_id]["final_entity"]["name"] = (
+ avalon_name
+ )
+ self.log.warning("Name was changed back to {} <{}>".format(
+ avalon_name, ent_path
+ ))
+ self._ent_pats_by_ftrack_id.pop(ftrack_id, None)
+                msg = (
+                    " It is not allowed to change"
+                    " the name of an entity or its parents"
+                    " that already have published context"
+                )
+ self.report_items["warning"][msg].append(ent_path)
+
+            # skip parent processing if hierarchy didn't change
+ if parent_check:
+ continue
+
+            # Logic for when parenting (hierarchy) changed but should not have
+ old_ftrack_parent_id = self.avalon_ftrack_mapper.get(
+ avalon_parent_id
+ )
+
+            # If the last ftrack parent id from the mongo entity exists,
+            # just remap parent_id on the entity
+ if old_ftrack_parent_id:
+ # TODO report
+ # TODO logging
+ ent_path = self.get_ent_path(ftrack_id)
+                msg = (
+                    " It is not allowed"
+                    " to change the hierarchy of an entity or its parents"
+                    " that already have published context"
+                )
+ self.report_items["warning"][msg].append(ent_path)
+                self.log.warning((
+                    "Entity has published context so it was moved"
+                    " back in the hierarchy <{}>"
+                ).format(ent_path))
+ self.entities_dict[ftrack_id]["entity"]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[ftrack_id]["parent_id"] = (
+ old_ftrack_parent_id
+ )
+ self.entities_dict[old_ftrack_parent_id][
+ "children"
+ ].append(ftrack_id)
+
+ continue
+
+ old_parent_ent = self.avalon_ents_by_id.get(avalon_parent_id)
+ if not old_parent_ent:
+ old_parent_ent = self.avalon_archived_by_id.get(
+ avalon_parent_id
+ )
+
+ # TODO report
+ # TODO logging
+ if not old_parent_ent:
+ self.log.warning((
+ "Parent entity was not found by id"
+ " - Trying to find by parent name"
+ ))
+ ent_path = self.get_ent_path(ftrack_id)
+
+ parents = avalon_entity["data"]["parents"]
+ parent_name = parents[-1]
+ matching_entity_id = None
+ for id, entity_dict in self.entities_dict.items():
+ if entity_dict["name"] == parent_name:
+ matching_entity_id = id
+ break
+
+ if matching_entity_id is None:
+ # TODO logging
+ # TODO report (turn off auto-sync?)
+ self.log.error((
+ "Entity has published context but was moved in"
+ " hierarchy and previous parent was not found so it is"
+ " not possible to solve this programmatically <{}>"
+ ).format(ent_path))
+ msg = (
+ " Parent of entity can't be"
+ " changed due to published context and previous parent"
+ " was not found"
+ )
+ self.report_items["error"][msg].append(ent_path)
+ self.filter_with_children(ftrack_id)
+ continue
+
+ matching_ent_dict = self.entities_dict.get(matching_entity_id)
+ match_ent_parents = matching_ent_dict.get(
+ "final_entity", {}).get(
+ "data", {}).get(
+ "parents", ["__NOT_SET__"]
+ )
+ # TODO logging
+ # TODO report
+ if (
+ len(match_ent_parents) >= len(parents) or
+ match_ent_parents[:-1] != parents
+ ):
+ ent_path = self.get_ent_path(ftrack_id)
+                self.log.error((
+                    "Entity has published context but was moved in"
+                    " hierarchy and previous parents were moved too, so it"
+                    " is not possible to solve this programmatically <{}>"
+                ).format(ent_path))
+                msg = (
+                    " Parent of entity can't be"
+                    " changed due to published context but whole hierarchy"
+                    " was scrambled"
+                )
+                self.report_items["error"][msg].append(ent_path)
+                continue
+
+ old_parent_ent = matching_ent_dict["final_entity"]
+
+ parent_id = self.ft_project_id
+ entities_to_create = []
+ # TODO logging
+            self.log.warning(
+                "Ftrack entities must be recreated because they have"
+                " published context but were removed"
+            )
+
+ _avalon_ent = old_parent_ent
+
+ self.updates[avalon_parent_id] = {"type": "asset"}
+ success = True
+ while True:
+ _vis_par = _avalon_ent["data"]["visualParent"]
+ _name = _avalon_ent["name"]
+ if _name in self.all_ftrack_names:
+                av_ent_path_items = list(_avalon_ent["data"]["parents"])
+ av_ent_path_items.append(_name)
+ av_ent_path = "/".join(av_ent_path_items)
+ # TODO report
+ # TODO logging
+ self.log.error((
+ "Can't recreate entity in Ftrack because entity with"
+ " same name already exists in different hierarchy <{}>"
+ ).format(av_ent_path))
+                    msg = (
+                        " Parent of entity can't be"
+                        " changed due to published context but the previous"
+                        " parent's name exists at a different hierarchy"
+                        " level"
+                    )
+ self.report_items["error"][msg].append(av_ent_path)
+ self.filter_with_children(ftrack_id)
+ success = False
+ break
+
+ entities_to_create.append(_avalon_ent)
+ if _vis_par is None:
+ break
+
+ _vis_par = str(_vis_par)
+ _mapped = self.avalon_ftrack_mapper.get(_vis_par)
+ if _mapped:
+ parent_id = _mapped
+ break
+
+ _avalon_ent = self.avalon_ents_by_id.get(_vis_par)
+ if not _avalon_ent:
+ _avalon_ent = self.avalon_archived_by_id.get(_vis_par)
+
+ if success is False:
+ continue
+
+ new_entity_id = None
+ for av_entity in reversed(entities_to_create):
+ new_entity_id = self.create_ftrack_ent_from_avalon_ent(
+ av_entity, parent_id
+ )
+ update_queue.put(new_entity_id)
+
+ if new_entity_id:
+ ftrack_ent_dict["entity"]["parent_id"] = new_entity_id
+
+ if hierarchy_changing_ids:
+ self.reload_parents(hierarchy_changing_ids)
+
+ for ftrack_id in self.update_ftrack_ids:
+ if ftrack_id == self.ft_project_id:
+ continue
+
+ avalon_id = self.ftrack_avalon_mapper[ftrack_id]
+ avalon_entity = self.avalon_ents_by_id[avalon_id]
+
+ avalon_attrs = self.entities_dict[ftrack_id]["avalon_attrs"]
+ if (
+ self.id_cust_attr not in avalon_attrs or
+ avalon_attrs[self.id_cust_attr] != avalon_id
+ ):
+ configuration_id = self.entities_dict[ftrack_id][
+ "avalon_attrs_id"][self.id_cust_attr]
+
+            # list of pairs keeps key order on every Python version
+            _entity_key = collections.OrderedDict((
+                ("configuration_id", configuration_id),
+                ("entity_id", ftrack_id)
+            ))
+
+ self.session.recorded_operations.push(
+ fa_session.ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ fa_session.ftrack_api.symbol.NOT_SET,
+ avalon_id
+ )
+ )
+ # check rest of data
+ data_changes = self.compare_dict(
+ self.entities_dict[ftrack_id]["final_entity"],
+ avalon_entity,
+ ignore_keys[ftrack_id]
+ )
+ if data_changes:
+ self.updates[avalon_id] = self.merge_dicts(
+ data_changes,
+ self.updates[avalon_id]
+ )
+
+ def synchronize(self):
+ self.log.debug("* Synchronization begins")
+ avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
+ if avalon_project_id:
+ self.avalon_project_id = ObjectId(avalon_project_id)
+
+ # remove filtered ftrack ids from create/update list
+ for ftrack_id in self.all_filtered_entities:
+ if ftrack_id in self.create_ftrack_ids:
+ self.create_ftrack_ids.remove(ftrack_id)
+ elif ftrack_id in self.update_ftrack_ids:
+ self.update_ftrack_ids.remove(ftrack_id)
+
+ self.log.debug("* Processing entities for archivation")
+ self.delete_entities()
+
+ self.log.debug("* Processing new entities")
+ # Create not created entities
+ for ftrack_id in self.create_ftrack_ids:
+            # CHECK: the entity may already exist because it is the parent
+            # of another entity which was processed first
+ if ftrack_id in self.ftrack_avalon_mapper:
+ continue
+ self.create_avalon_entity(ftrack_id)
+
+        if self.create_list:
+ self.dbcon.insert_many(self.create_list)
+
+ self.session.commit()
+
+ self.log.debug("* Processing entities for update")
+ self.prepare_changes()
+ self.update_entities()
+ self.session.commit()
+
+ def create_avalon_entity(self, ftrack_id):
+ if ftrack_id == self.ft_project_id:
+ self.create_avalon_project()
+ return
+
+ entity_dict = self.entities_dict[ftrack_id]
+ parent_ftrack_id = entity_dict["parent_id"]
+ avalon_parent = None
+ if parent_ftrack_id != self.ft_project_id:
+ avalon_parent = self.ftrack_avalon_mapper.get(parent_ftrack_id)
+ # if not avalon_parent:
+ # self.create_avalon_entity(parent_ftrack_id)
+ # avalon_parent = self.ftrack_avalon_mapper[parent_ftrack_id]
+ avalon_parent = ObjectId(avalon_parent)
+
+        # check archived assets (avalon_archived_by_id / by_name)
+ current_id = (
+ entity_dict["avalon_attrs"].get(self.id_cust_attr) or ""
+ ).strip()
+ mongo_id = current_id
+ name = entity_dict["name"]
+
+        # Check if an archived asset exists in mongo - by ID
+ unarchive = False
+ unarchive_id = self.check_unarchivation(ftrack_id, mongo_id, name)
+ if unarchive_id is not None:
+ unarchive = True
+ mongo_id = unarchive_id
+
+ item = entity_dict["final_entity"]
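+        # Reuse the mongo id stored on the ftrack entity when it is a
+        # valid ObjectId that is not mapped yet, otherwise generate new.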
+ try:
+ new_id = ObjectId(mongo_id)
+ if mongo_id in self.avalon_ftrack_mapper:
+ new_id = ObjectId()
+ except InvalidId:
+ new_id = ObjectId()
+
+ item["_id"] = new_id
+ item["parent"] = self.avalon_project_id
+ item["schema"] = self.entity_schemas["asset"]
+ item["data"]["visualParent"] = avalon_parent
+
+ new_id_str = str(new_id)
+ self.ftrack_avalon_mapper[ftrack_id] = new_id_str
+ self.avalon_ftrack_mapper[new_id_str] = ftrack_id
+
+ self._avalon_ents_by_id[new_id_str] = item
+ self._avalon_ents_by_ftrack_id[ftrack_id] = new_id_str
+ self._avalon_ents_by_name[item["name"]] = new_id_str
+
+ if current_id != new_id_str:
+ # store mongo id to ftrack entity
+ configuration_id = self.entities_dict[ftrack_id][
+ "avalon_attrs_id"
+ ][self.id_cust_attr]
+        _entity_key = collections.OrderedDict((
+            ("configuration_id", configuration_id),
+            ("entity_id", ftrack_id)
+        ))
+
+ self.session.recorded_operations.push(
+ fa_session.ftrack_api.operation.UpdateEntityOperation(
+ "ContextCustomAttributeValue",
+ _entity_key,
+ "value",
+ fa_session.ftrack_api.symbol.NOT_SET,
+ new_id_str
+ )
+ )
+
+ if unarchive is False:
+ self.create_list.append(item)
+ return
+ # If unarchive then replace entity data in database
+ self.dbcon.replace_one({"_id": new_id}, item)
+ self.remove_from_archived(mongo_id)
+        av_ent_path_items = list(item["data"]["parents"])
+ av_ent_path_items.append(item["name"])
+ av_ent_path = "/".join(av_ent_path_items)
+ self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
+
+ def check_unarchivation(self, ftrack_id, mongo_id, name):
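+        # Decide whether an archived avalon entity may be re-used for this
+        # ftrack entity. Returns the mongo id to unarchive, or None.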
+ archived_by_id = self.avalon_archived_by_id.get(mongo_id)
+ archived_by_name = self.avalon_archived_by_name.get(name)
+
+ # if not found in archived then skip
+ if not archived_by_id and not archived_by_name:
+ return None
+
+ entity_dict = self.entities_dict[ftrack_id]
+
+ if archived_by_id:
+ # if is changeable then unarchive (nothing to check here)
+ if self.changeability_by_mongo_id[mongo_id]:
+ return mongo_id
+
+ # TODO replace `__NOTSET__` with custom None constant
+ archived_parent_id = archived_by_id["data"].get(
+ "visualParent", "__NOTSET__"
+ )
+ archived_parents = archived_by_id["data"].get("parents")
+ archived_name = archived_by_id["name"]
+
+ if (
+ archived_name != entity_dict["name"] or
+ archived_parents != entity_dict["final_entity"]["data"][
+ "parents"
+ ]
+ ):
+ return None
+
+ return mongo_id
+
+        # First check if any archived entity has the same parents
+ for archived in archived_by_name:
+ mongo_id = str(archived["_id"])
+ archived_parents = archived.get("data", {}).get("parents")
+ if (
+ archived_parents == entity_dict["final_entity"]["data"][
+ "parents"
+ ]
+ ):
+ return mongo_id
+
+        # Secondly try to find one closer to the current ftrack entity
+ first_changeable = None
+ for archived in archived_by_name:
+ mongo_id = str(archived["_id"])
+ if not self.changeability_by_mongo_id[mongo_id]:
+ continue
+
+ if first_changeable is None:
+ first_changeable = mongo_id
+
+ ftrack_parent_id = entity_dict["parent_id"]
+ map_ftrack_parent_id = self.ftrack_avalon_mapper.get(
+ ftrack_parent_id
+ )
+
+ # TODO replace `__NOTSET__` with custom None constant
+ archived_parent_id = archived.get("data", {}).get(
+ "visualParent", "__NOTSET__"
+ )
+ if archived_parent_id is not None:
+ archived_parent_id = str(archived_parent_id)
+
+        # skip if parent is archived - how could this be possible?
+ parent_entity = self.avalon_ents_by_id.get(archived_parent_id)
+ if (
+ parent_entity and (
+ map_ftrack_parent_id is not None and
+ map_ftrack_parent_id == str(parent_entity["_id"])
+ )
+ ):
+ return mongo_id
+        # Finally return the first changeable with the same name (or None)
+ return first_changeable
+
+ def create_avalon_project(self):
+ project_item = self.entities_dict[self.ft_project_id]["final_entity"]
+ mongo_id = (
+ self.entities_dict[self.ft_project_id]["avalon_attrs"].get(
+ self.id_cust_attr
+ ) or ""
+ ).strip()
+
+ try:
+ new_id = ObjectId(mongo_id)
+ except InvalidId:
+ new_id = ObjectId()
+
+ project_item["_id"] = new_id
+ project_item["parent"] = None
+ project_item["schema"] = self.entity_schemas["project"]
+ project_item["config"]["schema"] = self.entity_schemas["config"]
+ project_item["config"]["template"] = self.get_avalon_project_template()
+
+        self.ftrack_avalon_mapper[self.ft_project_id] = str(new_id)
+        self.avalon_ftrack_mapper[str(new_id)] = self.ft_project_id
+
+ self.avalon_project_id = new_id
+
+ self._avalon_ents_by_id[str(new_id)] = project_item
+ self._avalon_ents_by_ftrack_id[self.ft_project_id] = str(new_id)
+ self._avalon_ents_by_name[project_item["name"]] = str(new_id)
+
+ self.create_list.append(project_item)
+
+ # store mongo id to ftrack entity
+ entity = self.entities_dict[self.ft_project_id]["entity"]
+ entity["custom_attributes"][self.id_cust_attr] = str(new_id)
+
+ def get_avalon_project_template(self):
+        """Get avalon project template.
+
+        Returns:
+            dict: Avalon project templates.
+        """
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ templates = Anatomy(project_name).templates
return {
- 'success': True,
- 'message': "Synchronization was successfull"
+ "workfile": templates["avalon"]["workfile"],
+ "work": templates["avalon"]["work"],
+ "publish": templates["avalon"]["publish"]
+ }
+
+ def _bubble_changeability(self, unchangeable_ids):
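+        # Walk up the visualParent chain from entities with published
+        # subsets and mark every ancestor as unchangeable.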
+ unchangeable_queue = queue.Queue()
+ for entity_id in unchangeable_ids:
+ unchangeable_queue.put((entity_id, False))
+
+ processed_parents_ids = []
+ subsets_to_remove = []
+ while not unchangeable_queue.empty():
+ entity_id, child_is_archived = unchangeable_queue.get()
+ # skip if already processed
+ if entity_id in processed_parents_ids:
+ continue
+
+ entity = self.avalon_ents_by_id.get(entity_id)
+            # if entity is not archived but an unchangeable child was,
+            # then skip - archived entities should not affect unarchived?
+ if entity and child_is_archived:
+ continue
+
+ # set changeability of current entity to False
+ self._changeability_by_mongo_id[entity_id] = False
+ processed_parents_ids.append(entity_id)
+            # if the entity was not found it is probably archived
+ if not entity:
+ entity = self.avalon_archived_by_id.get(entity_id)
+ child_is_archived = True
+
+ if not entity:
+                # if still not found then it is a subset without a parent
+ if entity_id in unchangeable_ids:
+ subsets_to_remove.append(entity_id)
+ else:
+ # TODO logging - What is happening here?
+                    self.log.warning((
+                        "Avalon contains entities without valid parents"
+                        " leading to the project (should not cause errors)"
+                        " - MongoId <{}>"
+                    ).format(str(entity_id)))
+ continue
+
+ # skip if parent is project
+ parent_id = entity["data"]["visualParent"]
+ if parent_id is None:
+ continue
+ unchangeable_queue.put((str(parent_id), child_is_archived))
+
+ self._delete_subsets_without_asset(subsets_to_remove)
+
+ def _delete_subsets_without_asset(self, not_existing_parents):
+ subset_ids = []
+ version_ids = []
+ repre_ids = []
+ to_delete = []
+
+ for parent_id in not_existing_parents:
+ subsets = self.subsets_by_parent_id.get(parent_id)
+ if not subsets:
+ continue
+ for subset in subsets:
+ if subset.get("type") != "subset":
+ continue
+ subset_ids.append(subset["_id"])
+
+        # materialize cursors; pymongo cursors are always truthy
+        db_subsets = list(self.dbcon.find({
+            "_id": {"$in": subset_ids},
+            "type": "subset"
+        }))
+        if not db_subsets:
+            return
+
+        db_versions = list(self.dbcon.find({
+            "parent": {"$in": subset_ids},
+            "type": "version"
+        }))
+ if db_versions:
+ version_ids = [ver["_id"] for ver in db_versions]
+
+        db_repres = list(self.dbcon.find({
+            "parent": {"$in": version_ids},
+            "type": "representation"
+        }))
+ if db_repres:
+ repre_ids = [repre["_id"] for repre in db_repres]
+
+ to_delete.extend(subset_ids)
+ to_delete.extend(version_ids)
+ to_delete.extend(repre_ids)
+
+ self.dbcon.delete_many({"_id": {"$in": to_delete}})
+
+ # Probably deprecated
+ def _check_changeability(self, parent_id=None):
+ for entity in self.avalon_ents_by_parent_id[parent_id]:
+ mongo_id = str(entity["_id"])
+ is_changeable = self._changeability_by_mongo_id.get(mongo_id)
+ if is_changeable is not None:
+ continue
+
+ self._check_changeability(mongo_id)
+ is_changeable = True
+            for child in self.avalon_ents_by_parent_id[mongo_id]:
+ if not self._changeability_by_mongo_id[str(child["_id"])]:
+ is_changeable = False
+ break
+
+            if is_changeable is True:
+                is_changeable = (mongo_id not in self.subsets_by_parent_id)
+            self._changeability_by_mongo_id[mongo_id] = is_changeable
+
+ def update_entities(self):
+ mongo_changes_bulk = []
+ for mongo_id, changes in self.updates.items():
+            entity_filter = {"_id": ObjectId(mongo_id)}
+            change_data = self.from_dict_to_set(changes)
+            mongo_changes_bulk.append(UpdateOne(entity_filter, change_data))
+
+ if not mongo_changes_bulk:
+ # TODO LOG
+ return
+ self.dbcon.bulk_write(mongo_changes_bulk)
+
+ def from_dict_to_set(self, data):
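+        # Flatten a nested dict of changes into a mongo "$set" document
+        # with dot-notation keys, e.g. {"data": {"fps": 25}} becomes
+        # {"$set": {"data.fps": 25}}.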
+ result = {"$set": {}}
+ dict_queue = queue.Queue()
+ dict_queue.put((None, data))
+
+ while not dict_queue.empty():
+ _key, _data = dict_queue.get()
+ for key, value in _data.items():
+ new_key = key
+ if _key is not None:
+ new_key = "{}.{}".format(_key, key)
+
+ if not isinstance(value, dict):
+ result["$set"][new_key] = value
+ continue
+ dict_queue.put((new_key, value))
+ return result
+
+ def reload_parents(self, hierarchy_changing_ids):
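+        # Recompute "parents" and "hierarchy" for all entities below the
+        # changed ones, breadth-first from the project root.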
+ parents_queue = queue.Queue()
+ parents_queue.put((self.ft_project_id, [], False))
+ while not parents_queue.empty():
+ ftrack_id, parent_parents, changed = parents_queue.get()
+ _parents = parent_parents.copy()
+ if ftrack_id not in hierarchy_changing_ids and not changed:
+ if ftrack_id != self.ft_project_id:
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+ continue
+
+ changed = True
+            parents = list(_parents)
+ hierarchy = "/".join(parents)
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["parents"] = parents
+ self.entities_dict[ftrack_id][
+ "final_entity"]["data"]["hierarchy"] = hierarchy
+
+ _parents.append(self.entities_dict[ftrack_id]["name"])
+ for child_id in self.entities_dict[ftrack_id]["children"]:
+ parents_queue.put((child_id, _parents, changed))
+
+ if ftrack_id in self.create_ftrack_ids:
+ mongo_id = self.ftrack_avalon_mapper[ftrack_id]
+ if "data" not in self.updates[mongo_id]:
+ self.updates[mongo_id]["data"] = {}
+ self.updates[mongo_id]["data"]["parents"] = parents
+ self.updates[mongo_id]["data"]["hierarchy"] = hierarchy
+
+ def prepare_project_changes(self):
+ ftrack_ent_dict = self.entities_dict[self.ft_project_id]
+ ftrack_entity = ftrack_ent_dict["entity"]
+ avalon_code = self.avalon_project["data"]["code"]
+        # TODO Is it possible to sync if the full name was changed?
+ # if ftrack_ent_dict["name"] != self.avalon_project["name"]:
+ # ftrack_entity["full_name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["name"] = avalon_name
+ # self.entities_dict[self.ft_project_id]["final_entity"][
+ # "name"
+ # ] = avalon_name
+
+ # TODO logging
+ # TODO report
+        # TODO Can this happen? Is it possible to change the project code?
+ if ftrack_entity["name"] != avalon_code:
+ ftrack_entity["name"] = avalon_code
+ self.entities_dict[self.ft_project_id]["final_entity"]["data"][
+ "code"
+ ] = avalon_code
+ self.session.commit()
+ sub_msg = (
+ "Project code was changed back to \"{}\"".format(avalon_code)
+ )
+ msg = (
+ "It is not allowed to change"
+ " project code after synchronization"
+ )
+ self.report_items["warning"][msg] = sub_msg
+ self.log.warning(sub_msg)
+
+ return self.compare_dict(
+ self.entities_dict[self.ft_project_id]["final_entity"],
+ self.avalon_project
+ )
+
+    def compare_dict(self, dict_new, dict_old, _ignore_keys=None):
+        # _ignore_keys may be used for nested dict keys
+        # like "data.visualParent"
+        if _ignore_keys is None:
+            _ignore_keys = []
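+        # e.g. compare_dict({"data": {"fps": 25}}, {"data": {"fps": 24}})
+        # returns {"data": {"fps": 25}}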
+ changes = {}
+ ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) == 1:
+ ignore_keys.append(key_items[0])
+
+ for key, value in dict_new.items():
+ if key in ignore_keys:
+ continue
+
+ if key not in dict_old:
+ changes[key] = value
+ continue
+
+ if isinstance(value, dict):
+ if not isinstance(dict_old[key], dict):
+ changes[key] = value
+ continue
+
+ _new_ignore_keys = []
+ for key_val in _ignore_keys:
+ key_items = key_val.split(".")
+ if len(key_items) <= 1:
+ continue
+ _new_ignore_keys.append(".".join(key_items[1:]))
+
+ _changes = self.compare_dict(
+ value, dict_old[key], _new_ignore_keys
+ )
+ if _changes:
+ changes[key] = _changes
+ continue
+
+ if value != dict_old[key]:
+ changes[key] = value
+
+ return changes
+
+ def merge_dicts(self, dict_new, dict_old):
+        # Recursively merge dict_new into dict_old; dict_new values win
+ for key, value in dict_new.items():
+ if key not in dict_old:
+ dict_old[key] = value
+ continue
+
+ if isinstance(value, dict):
+ dict_old[key] = self.merge_dicts(value, dict_old[key])
+ continue
+
+ dict_old[key] = value
+
+ return dict_old
+
+ def delete_entities(self):
+ if not self.deleted_entities:
+ return
+ # Try to order so child is not processed before parent
+ deleted_entities = []
+        _deleted_entities = list(self.deleted_entities)
+
+        while _deleted_entities:
+            _ready = []
+            for mongo_id in _deleted_entities:
+                ent = self.avalon_ents_by_id[mongo_id]
+                vis_par = ent["data"]["visualParent"]
+                # check the remaining queue, not the full list, so a child
+                # becomes ready once its parent has been processed
+                if (
+                    vis_par is not None and
+                    str(vis_par) in _deleted_entities
+                ):
+                    continue
+                _ready.append(mongo_id)
+
+ for id in _ready:
+ deleted_entities.append(id)
+ _deleted_entities.remove(id)
+
+ delete_ids = []
+ for mongo_id in deleted_entities:
+ # delete if they are deletable
+ if self.changeability_by_mongo_id[mongo_id]:
+ delete_ids.append(ObjectId(mongo_id))
+ continue
+
+ # check if any new created entity match same entity
+ # - name and parents must match
+ deleted_entity = self.avalon_ents_by_id[mongo_id]
+ name = deleted_entity["name"]
+ parents = deleted_entity["data"]["parents"]
+ similar_ent_id = None
+ for ftrack_id in self.create_ftrack_ids:
+ _ent_final = self.entities_dict[ftrack_id]["final_entity"]
+ if _ent_final["name"] != name:
+ continue
+ if _ent_final["data"]["parents"] != parents:
+ continue
+
+                # If the "same" entity is in the create list we can archive
+                # the current one since it will be unarchived in the create
+                # method
+ similar_ent_id = ftrack_id
+ break
+
+            # If a similar entity (same name and parents) is in the create
+            # list then just move it from create to update
+ if similar_ent_id is not None:
+ self.create_ftrack_ids.remove(similar_ent_id)
+ self.update_ftrack_ids.append(similar_ent_id)
+ self.avalon_ftrack_mapper[mongo_id] = similar_ent_id
+ self.ftrack_avalon_mapper[similar_ent_id] = mongo_id
+ continue
+
+ found_by_name_id = None
+ for ftrack_id, ent_dict in self.entities_dict.items():
+ if not ent_dict.get("name"):
+ continue
+
+ if name == ent_dict["name"]:
+ found_by_name_id = ftrack_id
+ break
+
+ if found_by_name_id is not None:
+                # * THESE conditions are too complex to implement in the
+                #   first stage - probably not possible to solve if this
+                #   happens
+ # if found_by_name_id in self.create_ftrack_ids:
+ # # reparent entity of the new one create?
+ # pass
+ #
+ # elif found_by_name_id in self.update_ftrack_ids:
+ # found_mongo_id = self.ftrack_avalon_mapper[found_by_name_id]
+ #
+ # ent_dict = self.entities_dict[found_by_name_id]
+
+            # TODO report - CRITICAL: entity with the same name already
+            # exists in a different hierarchy - can't recreate the entity
+ continue
+
+            _vis_parent = deleted_entity["data"]["visualParent"]
+            if _vis_parent is None:
+                _vis_parent = self.avalon_project_id
+            _vis_parent = str(_vis_parent)
+            ftrack_parent_id = self.avalon_ftrack_mapper[_vis_parent]
+ self.create_ftrack_ent_from_avalon_ent(
+ deleted_entity, ftrack_parent_id
+ )
+
+        archive_filter = {"_id": {"$in": delete_ids}, "type": "asset"}
+        self.dbcon.update_many(
+            archive_filter, {"$set": {"type": "archived_asset"}}
+        )
+
+ def create_ftrack_ent_from_avalon_ent(self, av_entity, parent_id):
+ new_entity = None
+ parent_entity = self.entities_dict[parent_id]["entity"]
+
+ _name = av_entity["name"]
+ _type = av_entity["data"].get("entityType", "folder")
+
+ self.log.debug((
+            "Re-creating deleted entity {} <{}>"
+ ).format(_name, _type))
+
+ new_entity = self.session.create(_type, {
+ "name": _name,
+ "parent": parent_entity
+ })
+
+        final_entity = dict(av_entity)
+
+ if final_entity.get("type") != "asset":
+ final_entity["type"] = "asset"
+
+ new_entity_id = new_entity["id"]
+ new_entity_data = {
+ "entity": new_entity,
+ "parent_id": parent_id,
+ "entity_type": _type.lower(),
+ "entity_type_orig": _type,
+ "name": _name,
+ "final_entity": final_entity
+ }
+ for k, v in new_entity_data.items():
+ self.entities_dict[new_entity_id][k] = v
+
+        p_children = self.entities_dict[parent_id]["children"]
+        if new_entity_id not in p_children:
+            self.entities_dict[parent_id]["children"].append(new_entity_id)
+
+ cust_attr, hier_attrs = self.get_avalon_attr()
+ for _attr in cust_attr:
+ key = _attr["key"]
+ if key not in av_entity["data"]:
+ continue
+
+ if key not in new_entity["custom_attributes"]:
+ continue
+
+ value = av_entity["data"][key]
+ if not value:
+ continue
+
+ new_entity["custom_attributes"][key] = value
+
+ av_entity_id = str(av_entity["_id"])
+ new_entity["custom_attributes"][self.id_cust_attr] = av_entity_id
+
+ self.ftrack_avalon_mapper[new_entity_id] = av_entity_id
+ self.avalon_ftrack_mapper[av_entity_id] = new_entity_id
+
+ self.session.commit()
+
+ ent_path = self.get_ent_path(new_entity_id)
+        msg = (
+            "Deleted entity was recreated because it (or its children)"
+            " had published context"
+        )
+
+ self.report_items["info"][msg].append(ent_path)
+
+ return new_entity_id
+
+ def regex_duplicate_interface(self):
+ items = []
+ if self.failed_regex or self.tasks_failed_regex:
+ subtitle = "Not allowed symbols in entity names:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: Allowed symbols are Letters( a-Z ),"
+                    " Numbers( 0-9 ) and Underscore( _ )<br>"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '<p>{}</p>'.format("<br>".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+ for name, ids in self.tasks_failed_regex.items():
+ error_title = {
+ "type": "label",
+ "value": "## Task: {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ ent_path = "/".join([ent_path, name])
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '<p>{}</p>'.format("<br>".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ",".join(paths)))
+
+            self.log.warning("{} {}".format(subtitle, ", ".join(log_msgs)))
+
+ if self.duplicates:
+ subtitle = "Duplicated entity names:"
+ items.append({
+ "type": "label",
+ "value": "# {}".format(subtitle)
+ })
+ items.append({
+ "type": "label",
+ "value": (
+ "NOTE: It is not allowed to have same name"
+                    " for multiple entities in one project<br>"
+ )
+ })
+ log_msgs = []
+ for name, ids in self.duplicates.items():
+ error_title = {
+ "type": "label",
+ "value": "## {}".format(name)
+ }
+ items.append(error_title)
+ paths = []
+ for entity_id in ids:
+ ent_path = self.get_ent_path(entity_id)
+ paths.append(ent_path)
+
+ error_message = {
+ "type": "label",
+                    "value": '<p>{}</p>'.format("<br>".join(paths))
+ }
+ items.append(error_message)
+ log_msgs.append("<{}> ({})".format(name, ", ".join(paths)))
+
+            self.log.warning("{} {}".format(subtitle, ", ".join(log_msgs)))
+
+ return items
+
+ def get_avalon_attr(self, split_hierarchical=True):
+ custom_attributes = []
+ hier_custom_attributes = []
+ cust_attrs_query = (
+ "select id, entity_type, object_type_id, is_hierarchical, default"
+ " from CustomAttributeConfiguration"
+ " where group.name = \"avalon\""
+ )
+ all_avalon_attr = self.session.query(cust_attrs_query).all()
+ for cust_attr in all_avalon_attr:
+ if split_hierarchical and cust_attr["is_hierarchical"]:
+ hier_custom_attributes.append(cust_attr)
+ continue
+
+ custom_attributes.append(cust_attr)
+
+ if split_hierarchical:
+ # return tuple
+ return custom_attributes, hier_custom_attributes
+
+ return custom_attributes
+
+ def report(self):
+ items = []
+ project_name = self.entities_dict[self.ft_project_id]["name"]
+ title = "Synchronization report ({}):".format(project_name)
+
+ keys = ["error", "warning", "info"]
+ for key in keys:
+ subitems = []
+ if key == "warning":
+ for _item in self.regex_duplicate_interface():
+ subitems.append(_item)
+
+ for msg, _items in self.report_items[key].items():
+ if not _items:
+ continue
+
+ subitems.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+ if isinstance(_items, str):
+ _items = [_items]
+ subitems.append({
+ "type": "label",
+                    "value": '<p>{}</p>'.format("<br>".join(_items))
+ })
+
+ if items and subitems:
+ items.append(self.report_splitter)
+
+ items.extend(subitems)
+
+ return {
+ "items": items,
+ "title": title,
+ "success": False,
+ "message": "Synchronization Finished"
}
-def register(session, plugins_presets):
+class SyncToAvalonServer(BaseAction):
+ """
+ Synchronizing data action - from Ftrack to Avalon DB
+
+ Stores all information about entity.
+ - Name(string) - Most important information = identifier of entity
+ - Parent(ObjectId) - Avalon Project Id, if entity is not project itself
+ - Data(dictionary):
+ - VisualParent(ObjectId) - Avalon Id of parent asset
+ - Parents(array of string) - All parent names except project
+ - Tasks(array of string) - Tasks on asset
+ - FtrackId(string)
+ - entityType(string) - entity's type on Ftrack
+ * All Custom attributes in group 'Avalon'
+ - custom attributes that start with 'avalon_' are skipped
+
+    * This information is stored for all entities in the whole project.
+
+    The Avalon ID of the asset is stored in Ftrack
+    - in the custom attribute 'avalon_mongo_id'.
+    - the action DOES NOT create this custom attribute if it doesn't exist
+        - run the 'Create Custom Attributes' action
+        - or create it manually (not recommended)
+ """
+ #: Action identifier.
+ identifier = "sync.to.avalon.server"
+ #: Action label.
+ label = "Pype Admin"
+ variant = "- Sync To Avalon (Server)"
+ #: Action description.
+ description = "Send data from Ftrack to Avalon"
+ #: Action icon.
+ icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
+ os.environ.get(
+ "PYPE_STATICS_SERVER",
+ "http://localhost:{}".format(
+ config.get_presets().get("services", {}).get(
+ "statics_server", {}
+ ).get("default_port", 8021)
+ )
+ )
+ )
+ #: roles that are allowed to register this action
+ role_list = ["Pypeclub"]
+
+ def discover(self, session, entities, event):
+ """ Validation """
+ # Check if selection is valid
+ valid_selection = False
+ for ent in event["data"]["selection"]:
+ # Ignore entities that are not tasks or projects
+ if ent["entityType"].lower() in ["show", "task"]:
+ valid_selection = True
+ break
+
+ if not valid_selection:
+ return False
+
+        # Get user and check their roles
+ user_id = event.get("source", {}).get("user", {}).get("id")
+ if not user_id:
+ return False
+
+ user = session.query("User where id is \"{}\"".format(user_id)).first()
+ if not user:
+ return False
+
+        role_list = ["Pypeclub", "Administrator", "Project Manager"]
+        for role in user["user_security_roles"]:
+            if role["security_role"]["name"] in role_list:
+                return True
+
+        return False
+
+ def launch(self, session, in_entities, event):
+ time_start = time.time()
+
+ self.show_message(event, "Synchronization - Preparing data", True)
+ # Get ftrack project
+ if in_entities[0].entity_type.lower() == "project":
+ ft_project_name = in_entities[0]["full_name"]
+ else:
+ ft_project_name = in_entities[0]["project"]["full_name"]
+
+ try:
+ entities_factory = SyncEntitiesFactory(
+ self.log, session, ft_project_name
+ )
+ time_1 = time.time()
+
+ entities_factory.set_cutom_attributes()
+ time_2 = time.time()
+
+ # This must happen before all filtering!!!
+ entities_factory.prepare_avalon_entities(ft_project_name)
+ time_3 = time.time()
+
+ entities_factory.filter_by_ignore_sync()
+ time_4 = time.time()
+
+ entities_factory.duplicity_regex_check()
+ time_5 = time.time()
+
+ entities_factory.prepare_ftrack_ent_data()
+ time_6 = time.time()
+
+ entities_factory.synchronize()
+ time_7 = time.time()
+
+ self.log.debug(
+ "*** Synchronization finished ***"
+ )
+ self.log.debug(
+ "preparation <{}>".format(time_1 - time_start)
+ )
+ self.log.debug(
+ "set_cutom_attributes <{}>".format(time_2 - time_1)
+ )
+ self.log.debug(
+ "prepare_avalon_entities <{}>".format(time_3 - time_2)
+ )
+ self.log.debug(
+ "filter_by_ignore_sync <{}>".format(time_4 - time_3)
+ )
+ self.log.debug(
+ "duplicity_regex_check <{}>".format(time_5 - time_4)
+ )
+ self.log.debug(
+ "prepare_ftrack_ent_data <{}>".format(time_6 - time_5)
+ )
+ self.log.debug(
+ "synchronize <{}>".format(time_7 - time_6)
+ )
+ self.log.debug(
+ "* Total time: {}".format(time_7 - time_start)
+ )
+
+ report = entities_factory.report()
+ if report and report.get("items"):
+ default_title = "Synchronization report ({}):".format(
+ ft_project_name
+ )
+ self.show_interface(
+ items=report["items"],
+ title=report.get("title", default_title),
+ event=event
+ )
+ return {
+ "success": True,
+ "message": "Synchronization Finished"
+ }
+
+ except Exception:
+ self.log.error(
+ "Synchronization failed due to code error", exc_info=True
+ )
+ msg = "An error has happened during synchronization"
+ title = "Synchronization report ({}):".format(ft_project_name)
+ items = []
+ items.append({
+ "type": "label",
+ "value": "# {}".format(msg)
+ })
+ items.append({
+ "type": "label",
+ "value": "## Traceback of the error"
+ })
+ items.append({
+ "type": "label",
+ "value": "<p>{}</p>".format(
+ str(traceback.format_exc()).replace(
+ "\n", "<br>"
+ ).replace(
+ " ", "&nbsp;"
+ )
+ )
+ })
+
+ report = {"items": []}
+ try:
+ report = entities_factory.report()
+ except Exception:
+ pass
+
+ _items = report.get("items", [])
+ if _items:
+ items.append(entities_factory.report_splitter)
+ items.extend(_items)
+
+ self.show_interface(items, title, event)
+
+ return {"success": True, "message": msg}
+
+ finally:
+ try:
+ entities_factory.dbcon.uninstall()
+ except Exception:
+ pass
+
+ try:
+ entities_factory.session.close()
+ except Exception:
+ pass
+
+
+
+def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
- # Validate that session is an instance of ftrack_api.Session. If not,
- # assume that register is being called from an old or incompatible API and
- # return without doing anything.
- SyncToAvalon(session, plugins_presets).register()
-
-
-def main(arguments=None):
- '''Set up logging and register action.'''
- if arguments is None:
- arguments = []
-
- parser = argparse.ArgumentParser()
- # Allow setting of logging level from arguments.
- loggingLevels = {}
- for level in (
- logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
- logging.ERROR, logging.CRITICAL
- ):
- loggingLevels[logging.getLevelName(level).lower()] = level
-
- parser.add_argument(
- '-v', '--verbosity',
- help='Set the logging output verbosity.',
- choices=loggingLevels.keys(),
- default='info'
- )
- namespace = parser.parse_args(arguments)
-
- # Set up basic logging
- logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
- session = ftrack_api.Session()
- register(session)
-
- # Wait for events
- logging.info(
- 'Registered actions and listening for events. Use Ctrl-C to abort.'
- )
- session.event_hub.wait()
-
-
-if __name__ == '__main__':
- raise SystemExit(main(sys.argv[1:]))
+ SyncToAvalonServer(session, plugins_presets).register()
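Note: the error report above is delivered through `show_interface`, which consumes a list of widget dictionaries. A minimal sketch of that payload (values illustrative; only the "label" widget type is used by this action):

```python
# Sketch of the "items" list passed to show_interface() above.
# Values are markdown/HTML strings rendered by ftrack's widget UI.
items = [
    {"type": "label", "value": "# An error has happened"},
    {"type": "label", "value": "## Traceback of the error"},
    {"type": "label", "value": "<p>Traceback (most recent call last): ...</p>"},
]
```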
diff --git a/pype/ftrack/events/event_next_task_update.py b/pype/ftrack/events/event_next_task_update.py
index 51ccb2f057..68db07d03d 100644
--- a/pype/ftrack/events/event_next_task_update.py
+++ b/pype/ftrack/events/event_next_task_update.py
@@ -80,10 +80,10 @@ class NextTaskUpdate(BaseEvent):
'>>> [ {} ] updated to [ Ready ]'
).format(path))
except Exception as e:
+ session.rollback()
self.log.warning((
'!!! [ {} ] status couldnt be set: [ {} ]'
- ).format(path, e))
- session.rollback()
+ ).format(path, str(e)), exc_info=True)
def register(session, plugins_presets):
diff --git a/pype/ftrack/events/event_radio_buttons.py b/pype/ftrack/events/event_radio_buttons.py
index 917c7a49e6..56d4c4cb65 100644
--- a/pype/ftrack/events/event_radio_buttons.py
+++ b/pype/ftrack/events/event_radio_buttons.py
@@ -2,7 +2,7 @@ from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
-class Radio_buttons(BaseEvent):
+class RadioButtons(BaseEvent):
ignore_me = True
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 3fe65bca11..6ede2a4d11 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -2,7 +2,7 @@ from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, lib
-class Sync_to_Avalon(BaseEvent):
+class SyncToAvalon(BaseEvent):
priority = 100
@@ -101,7 +101,7 @@ class Sync_to_Avalon(BaseEvent):
avalon_project = result['project']
except Exception as e:
- session.reset() # reset session to clear it
+ session.rollback() # reset session to clear it
message = str(e)
title = 'Hey You! Unknown Error has been raised! (*look below*)'
@@ -124,4 +124,4 @@ class Sync_to_Avalon(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
- Sync_to_Avalon(session, plugins_presets).register()
+ SyncToAvalon(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_test.py b/pype/ftrack/events/event_test.py
index a909aa5510..bcea5b47e4 100644
--- a/pype/ftrack/events/event_test.py
+++ b/pype/ftrack/events/event_test.py
@@ -5,7 +5,7 @@ from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent
-class Test_Event(BaseEvent):
+class TestEvent(BaseEvent):
ignore_me = True
@@ -23,4 +23,4 @@ class Test_Event(BaseEvent):
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
- Test_Event(session, plugins_presets).register()
+ TestEvent(session, plugins_presets).register()
diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py
index 66a55c0cf7..607a31f44e 100644
--- a/pype/ftrack/events/event_version_to_task_statuses.py
+++ b/pype/ftrack/events/event_version_to_task_statuses.py
@@ -62,6 +62,7 @@ class VersionToTaskStatus(BaseEvent):
task['status'] = task_status
session.commit()
except Exception as e:
+ session.rollback()
self.log.warning('!!! [ {} ] status couldnt be set:\
[ {} ]'.format(path, e))
else:
diff --git a/pype/ftrack/ftrack_server/__init__.py b/pype/ftrack/ftrack_server/__init__.py
index 0861a1bc08..fcae4e0690 100644
--- a/pype/ftrack/ftrack_server/__init__.py
+++ b/pype/ftrack/ftrack_server/__init__.py
@@ -1 +1,2 @@
from .ftrack_server import FtrackServer
+from .lib import check_ftrack_url
diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
index 8dd503f845..e14fd705f1 100644
--- a/pype/ftrack/ftrack_server/event_server_cli.py
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -9,11 +9,12 @@ import atexit
import time
from urllib.parse import urlparse
-import requests
from pype.vendor import ftrack_api
from pype.ftrack.lib import credentials
from pype.ftrack.ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings
+from pype.ftrack.ftrack_server.lib import (
+ ftrack_events_mongo_settings, check_ftrack_url
+)
import socket_thread
@@ -25,36 +26,6 @@ class MongoPermissionsError(Exception):
super().__init__(message)
-def check_ftrack_url(url, log_errors=True):
- """Checks if Ftrack server is responding"""
- if not url:
- print('ERROR: Ftrack URL is not set!')
- return None
-
- url = url.strip('/ ')
-
- if 'http' not in url:
- if url.endswith('ftrackapp.com'):
- url = 'https://' + url
- else:
- url = 'https://{0}.ftrackapp.com'.format(url)
- try:
- result = requests.get(url, allow_redirects=False)
- except requests.exceptions.RequestException:
- if log_errors:
- print('ERROR: Entered Ftrack URL is not accesible!')
- return False
-
- if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
- if log_errors:
- print('ERROR: Entered Ftrack URL is not accesible!')
- return False
-
- print('DEBUG: Ftrack server {} is accessible.'.format(url))
-
- return url
-
-
def check_mongo_url(host, port, log_error=False):
"""Checks if mongo server is responding"""
sock = None
diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py
index 12b046c510..dd4f9a9c69 100644
--- a/pype/ftrack/ftrack_server/ftrack_server.py
+++ b/pype/ftrack/ftrack_server/ftrack_server.py
@@ -100,7 +100,10 @@ class FtrackServer:
log.warning(msg, exc_info=e)
if len(register_functions_dict) < 1:
- raise Exception
+ raise Exception((
+ "There are no events with register function."
+ " Registered paths: \"{}\""
+ ).format("| ".join(paths)))
# Load presets for setting plugins
key = "user"
diff --git a/pype/ftrack/ftrack_server/lib.py b/pype/ftrack/ftrack_server/lib.py
index 12159693fe..748937c7bd 100644
--- a/pype/ftrack/ftrack_server/lib.py
+++ b/pype/ftrack/ftrack_server/lib.py
@@ -1,4 +1,5 @@
import os
+import requests
try:
from urllib.parse import urlparse, parse_qs
except ImportError:
@@ -66,3 +67,33 @@ def get_ftrack_event_mongo_info():
url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)
return url, database, collection
+
+
+def check_ftrack_url(url, log_errors=True):
+ """Checks if Ftrack server is responding"""
+ if not url:
+ print('ERROR: Ftrack URL is not set!')
+ return None
+
+ url = url.strip('/ ')
+
+ if 'http' not in url:
+ if url.endswith('ftrackapp.com'):
+ url = 'https://' + url
+ else:
+ url = 'https://{0}.ftrackapp.com'.format(url)
+ try:
+ result = requests.get(url, allow_redirects=False)
+ except requests.exceptions.RequestException:
+ if log_errors:
+ print('ERROR: Entered Ftrack URL is not accessible!')
+ return False
+
+ if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
+ if log_errors:
+ print('ERROR: Entered Ftrack URL is not accessible!')
+ return False
+
+ print('DEBUG: Ftrack server {} is accessible.'.format(url))
+
+ return url
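A usage sketch of the relocated helper; the hostname is illustrative. Per the code above, bare names are expanded to an ftrackapp.com HTTPS URL before probing, and the normalized URL (or False/None) is returned:

```python
from pype.ftrack.ftrack_server.lib import check_ftrack_url

url = check_ftrack_url("mystudio")  # probes https://mystudio.ftrackapp.com
if not url:
    print("Ftrack server is not reachable, retry later")
```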
diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py
index f22f80e7f1..200006dd2d 100644
--- a/pype/ftrack/lib/ftrack_base_handler.py
+++ b/pype/ftrack/lib/ftrack_base_handler.py
@@ -128,6 +128,7 @@ class BaseHandler(object):
try:
return func(*args, **kwargs)
except Exception as exc:
+ self.session.rollback()
msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
self.log.error(msg, exc_info=True)
return {
diff --git a/pype/ftrack/lib/ftrack_event_handler.py b/pype/ftrack/lib/ftrack_event_handler.py
index db55eef16e..80a86527ab 100644
--- a/pype/ftrack/lib/ftrack_event_handler.py
+++ b/pype/ftrack/lib/ftrack_event_handler.py
@@ -26,6 +26,7 @@ class BaseEvent(BaseHandler):
try:
func(*args, **kwargs)
except Exception as exc:
+ self.session.rollback()
self.log.error(
'Event "{}" Failed: {}'.format(
self.__class__.__name__, str(exc)
diff --git a/pype/ftrack/lib/io_nonsingleton.py b/pype/ftrack/lib/io_nonsingleton.py
index 3490ef03be..6380e4eb23 100644
--- a/pype/ftrack/lib/io_nonsingleton.py
+++ b/pype/ftrack/lib/io_nonsingleton.py
@@ -50,6 +50,19 @@ class DbConnector(object):
self._database = None
self._is_installed = False
+ def __getitem__(self, key):
+ # gives direct access to a collection without setting `active_table`
+ return self._database[key]
+
+ def __getattribute__(self, attr):
+ # not all methods of the PyMongo database are implemented; with this
+ # fallback it is possible to use them too
+ try:
+ return super(DbConnector, self).__getattribute__(attr)
+ except AttributeError:
+ cur_proj = self.Session["AVALON_PROJECT"]
+ return self._database[cur_proj].__getattribute__(attr)
+
def install(self):
"""Establish a persistent connection to the database"""
if self._is_installed:
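With the two dunder methods added above, the connector can stand in for a PyMongo collection. An illustrative sketch (project names assumed; `count_documents` assumes a PyMongo version that provides it):

```python
db = DbConnector()
db.install()
db.Session["AVALON_PROJECT"] = "my_project"  # key read by __getattribute__

# Unimplemented methods fall through to the active project's collection:
count = db.count_documents({"type": "asset"})

# __getitem__ bypasses the active project entirely:
doc = db["another_project"].find_one({"type": "asset"})
```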
diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py
index ce2754c25d..9a11a47a3a 100644
--- a/pype/ftrack/tray/ftrack_module.py
+++ b/pype/ftrack/tray/ftrack_module.py
@@ -6,7 +6,7 @@ from Qt import QtCore, QtGui, QtWidgets
from pype.vendor import ftrack_api
from pypeapp import style
-from pype.ftrack import FtrackServer, credentials
+from pype.ftrack import FtrackServer, check_ftrack_url, credentials
from . import login_dialog
from pype import api as pype
@@ -24,7 +24,8 @@ class FtrackModule:
self.thread_timer = None
self.bool_logged = False
- self.bool_action_server = False
+ self.bool_action_server_running = False
+ self.bool_action_thread_running = False
self.bool_timer_event = False
def show_login_widget(self):
@@ -74,28 +75,50 @@ class FtrackModule:
# Actions part
def start_action_server(self):
+ if (
+ self.thread_action_server is not None and
+ self.bool_action_thread_running is False
+ ):
+ self.stop_action_server()
+
+ self.bool_action_thread_running = True
+ self.set_menu_visibility()
+
if self.thread_action_server is None:
self.thread_action_server = threading.Thread(
target=self.set_action_server
)
- self.thread_action_server.daemon = True
self.thread_action_server.start()
- log.info("Ftrack action server launched")
- self.bool_action_server = True
- self.set_menu_visibility()
-
def set_action_server(self):
- try:
- self.action_server.run_server()
- except Exception as exc:
- log.error(
- "Ftrack Action server crashed! Please try to start again.",
- exc_info=True
+ first_check = True
+ while self.bool_action_thread_running is True:
+ if not check_ftrack_url(os.environ['FTRACK_SERVER']):
+ if first_check:
+ log.warning(
+ "Could not connect to Ftrack server"
+ )
+ first_check = False
+ time.sleep(1)
+ continue
+ log.info(
+ "Connected to Ftrack server. Running actions session"
)
- # TODO show message to user
- self.bool_action_server = False
+ try:
+ self.bool_action_server_running = True
+ self.set_menu_visibility()
+ self.action_server.run_server()
+ if self.bool_action_thread_running:
+ log.debug("Ftrack action server has stopped")
+ except Exception:
+ log.warning(
+ "Ftrack Action server crashed. Trying to connect again",
+ exc_info=True
+ )
+ self.bool_action_server_running = False
self.set_menu_visibility()
+ first_check = True
+
+ self.bool_action_thread_running = False
def reset_action_server(self):
self.stop_action_server()
@@ -103,16 +126,21 @@ class FtrackModule:
def stop_action_server(self):
try:
+ self.bool_action_thread_running = False
self.action_server.stop_session()
if self.thread_action_server is not None:
self.thread_action_server.join()
self.thread_action_server = None
- log.info("Ftrack action server stopped")
- self.bool_action_server = False
+ log.info("Ftrack action server was forced to stop")
+
+ self.bool_action_server_running = False
self.set_menu_visibility()
- except Exception as e:
- log.error("During Killing action server: {0}".format(e))
+ except Exception:
+ log.warning(
+ "Error has happened during Killing action server",
+ exc_info=True
+ )
# Definition of Tray menu
def tray_menu(self, parent_menu):
@@ -158,6 +186,9 @@ class FtrackModule:
def tray_start(self):
self.validate()
+ def tray_exit(self):
+ self.stop_action_server()
+
# Definition of visibility of each menu actions
def set_menu_visibility(self):
@@ -170,9 +201,9 @@ class FtrackModule:
self.stop_timer_thread()
return
- self.aRunActionS.setVisible(not self.bool_action_server)
- self.aResetActionS.setVisible(self.bool_action_server)
- self.aStopActionS.setVisible(self.bool_action_server)
+ self.aRunActionS.setVisible(not self.bool_action_thread_running)
+ self.aResetActionS.setVisible(self.bool_action_thread_running)
+ self.aStopActionS.setVisible(self.bool_action_thread_running)
if self.bool_timer_event is False:
self.start_timer_thread()
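The rewritten tray module above splits one flag into two: `bool_action_thread_running` means "the retry loop should keep going", while `bool_action_server_running` means "a server session is currently up". A toy model of that loop (all names assumed):

```python
import threading
import time

class ActionServerLoop:
    def __init__(self, run_server, url_ok):
        self.thread_running = False   # ~ bool_action_thread_running
        self.server_running = False   # ~ bool_action_server_running
        self._run_server = run_server
        self._url_ok = url_ok
        self._thread = None

    def _loop(self):
        while self.thread_running:
            if not self._url_ok():
                time.sleep(1)         # wait for the server to come back
                continue
            try:
                self.server_running = True
                self._run_server()    # blocks until the session stops
            except Exception:
                pass                  # crashed: go back to the URL check
            self.server_running = False

    def start(self):
        self.thread_running = True
        self._thread = threading.Thread(target=self._loop)
        self._thread.start()

    def stop(self):
        self.thread_running = False
        if self._thread is not None:
            self._thread.join()
            self._thread = None
```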
diff --git a/pype/lib.py b/pype/lib.py
index 6f6895085e..ef48fac29b 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -562,7 +562,7 @@ def get_subsets(asset_name,
find_dict = {"type": "representation",
"parent": version_sel["_id"]}
- filter_repr = {"$or": [{"name": repr} for repr in representations]}
+ filter_repr = {"name": {"$in": representations}}
find_dict.update(filter_repr)
repres_out = [i for i in io.find(find_dict)]
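The filter rewrite above, side by side (values illustrative); both match the same documents, but `$in` expresses the membership test in one clause instead of N `$or` branches:

```python
representations = ["exr", "mov"]

old_filter = {"$or": [{"name": r} for r in representations]}
new_filter = {"name": {"$in": representations}}  # equivalent, simpler to index
```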
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 87b14b612b..b7d9a2855d 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -1,5 +1,6 @@
import os
import sys
+import six
import pyblish.api
import clique
@@ -125,6 +126,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
metadata=asset_metadata
)
)
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
# Adding metadata
existing_asset_metadata = asset_entity["metadata"]
@@ -162,6 +169,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
metadata=assetversion_metadata
)
)
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
# Adding metadata
existing_assetversion_metadata = assetversion_entity["metadata"]
@@ -170,7 +183,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Have to commit the version and asset, because location can't
# determine the final location without.
- session.commit()
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
# Component
# Get existing entity.
@@ -209,7 +227,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session.delete(member)
del(member)
- session.commit()
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
# Reset members in memory
if "members" in component_entity.keys():
@@ -320,4 +343,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
)
else:
# Commit changes.
- session.commit()
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
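Every commit in this plugin now gets the same try/rollback/reraise wrapper. As a sketch, the repetition could be factored into a helper (name illustrative, not part of the diff):

```python
import sys

import six

def commit_or_rollback(session):
    # Commit pending ftrack operations; on failure, roll the session back
    # so it stays usable, then re-raise with the original traceback
    # (six.reraise keeps this correct on both Python 2 and 3).
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        six.reraise(tp, value, tb)
```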
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py
index 889a78e254..9d0b7b3ab9 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_comments.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_comments.py
@@ -1,4 +1,6 @@
+import sys
import pyblish.api
+import six
class IntegrateFtrackComments(pyblish.api.InstancePlugin):
@@ -21,4 +23,9 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin):
entity["notes"].extend(notes)
- session.commit()
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
index 976250da00..f504a52f9e 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
@@ -37,6 +37,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
+ else:
+ raise ValueError("Instance version not set")
family = instance.data['family'].lower()
diff --git a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
index 73a4d8af97..6e2017ca4e 100644
--- a/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
+++ b/pype/plugins/ftrack/publish/integrate_hierarchy_ftrack.py
@@ -1,3 +1,6 @@
+import sys
+
+import six
import pyblish.api
from avalon import io
@@ -74,9 +77,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# try to find if entity already exists
else:
- query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format(
- entity_name, self.ft_project["full_name"]
- )
+ query = (
+ 'TypedContext where name is "{0}" and '
+ 'project_id is "{1}"'
+ ).format(entity_name, self.ft_project["id"])
try:
entity = self.session.query(query).one()
except Exception:
@@ -106,7 +110,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
for instance in instances:
instance.data['ftrackEntity'] = entity
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
@@ -129,11 +138,21 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
task_type=task,
parent=entity
)
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
# Incoming links.
self.create_links(entity_data, entity)
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
if 'childs' in entity_data:
self.import_to_ftrack(
@@ -143,7 +162,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# Clear existing links.
for link in entity.get("incoming_links", []):
self.session.delete(link)
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
# Create new links.
for input in entity_data.get("inputs", []):
@@ -179,7 +203,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
return task
@@ -188,6 +217,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
'name': name,
'parent': parent
})
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
return entity
diff --git a/pype/plugins/global/publish/collect_scene_version.py b/pype/plugins/global/publish/collect_scene_version.py
index 0d76015909..2844a695e2 100644
--- a/pype/plugins/global/publish/collect_scene_version.py
+++ b/pype/plugins/global/publish/collect_scene_version.py
@@ -24,4 +24,4 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
rootVersion = pype.get_version_from_path(filename)
context.data['version'] = rootVersion
- self.log.info('Scene Version: %s' % context.data('version'))
+ self.log.info('Scene Version: %s' % context.data.get('version'))
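Why the one-line fix above matters: pyblish's `context.data` is a plain dict, so calling it like a function raises:

```python
context_data = {"version": 3}

context_data.get("version")  # -> 3
context_data["version"]      # -> 3
# context_data("version")   # TypeError: 'dict' object is not callable
```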
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 5032e593a6..f0619eb776 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -1,18 +1,23 @@
import os
from os.path import getsize
import logging
-import speedcopy
+import sys
import clique
import errno
import pyblish.api
from avalon import api, io
from avalon.vendor import filelink
+# this is needed until speedcopy for linux is fixed
+if sys.platform == "win32":
+ from speedcopy import copyfile
+else:
+ from shutil import copyfile
log = logging.getLogger(__name__)
class IntegrateAssetNew(pyblish.api.InstancePlugin):
- """Resolve any dependency issius
+ """Resolve any dependency issues
This plug-in resolves any paths which, if not updated might break
the published file.
@@ -474,7 +479,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check if size of files are simetrical
while True:
- speedcopy.copyfile(src, dst)
+ copyfile(src, dst)
if str(getsize(src)) in str(getsize(dst)):
break
@@ -500,7 +505,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
self.log.debug("families. %s" % instance.data.get('families'))
- self.log.debug("families. %s" % type(instance.data.get('families')))
+ self.log.debug(
+ "families. %s" % type(instance.data.get('families')))
_id = io.insert_one({
"schema": "pype:subset-3.0",
diff --git a/pype/plugins/global/publish/validate_templates.py b/pype/plugins/global/publish/validate_templates.py
index a2c7b24ac1..f24f6b1a2e 100644
--- a/pype/plugins/global/publish/validate_templates.py
+++ b/pype/plugins/global/publish/validate_templates.py
@@ -1,8 +1,9 @@
import pyblish.api
import os
+
class ValidateTemplates(pyblish.api.ContextPlugin):
- """Check if all templates were filed"""
+ """Check if all templates were filled"""
label = "Validate Templates"
order = pyblish.api.ValidatorOrder - 0.1
@@ -18,12 +19,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
"project": {"name": "D001_projectsx",
"code": "prjX"},
- "ext": "exr",
- "version": 3,
- "task": "animation",
- "asset": "sh001",
- "hierarchy": "ep101/sq01/sh010"}
-
+ "ext": "exr",
+ "version": 3,
+ "task": "animation",
+ "asset": "sh001",
+ "app": "maya",
+ "hierarchy": "ep101/sq01/sh010"}
anatomy_filled = anatomy.format(data)
self.log.info(anatomy_filled)
@@ -31,11 +32,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
"project": {"name": "D001_projectsy",
"code": "prjY"},
- "ext": "abc",
- "version": 1,
- "task": "lookdev",
- "asset": "bob",
- "hierarchy": "ep101/sq01/bob"}
+ "ext": "abc",
+ "version": 1,
+ "task": "lookdev",
+ "asset": "bob",
+ "app": "maya",
+ "hierarchy": "ep101/sq01/bob"}
anatomy_filled = context.data["anatomy"].format(data)
self.log.info(anatomy_filled["work"]["folder"])
diff --git a/pype/plugins/maya/create/create_renderglobals.py b/pype/plugins/maya/create/create_renderglobals.py
index ac6048a3e7..7c71bfbc36 100644
--- a/pype/plugins/maya/create/create_renderglobals.py
+++ b/pype/plugins/maya/create/create_renderglobals.py
@@ -38,7 +38,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
self.log.warning("Deadline REST API url not found.")
else:
argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
- response = requests.get(argument)
+ response = self._requests_get(argument)
if not response.ok:
self.log.warning("No pools retrieved")
else:
@@ -135,7 +135,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
'authToken': self._token
}
api_entry = '/api/pools/list'
- response = requests.get(
+ response = self._requests_get(
self.MUSTER_REST_URL + api_entry, params=params)
if response.status_code != 200:
if response.status_code == 401:
diff --git a/pype/plugins/maya/publish/validate_assembly_name.py b/pype/plugins/maya/publish/validate_assembly_name.py
new file mode 100644
index 0000000000..2a3a92e950
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_assembly_name.py
@@ -0,0 +1,50 @@
+import pyblish.api
+import maya.cmds as cmds
+import pype.maya.action
+
+
+class ValidateAssemblyName(pyblish.api.InstancePlugin):
+ """ Ensure Assembly name ends with `GRP`
+
+ Check if assembly name ends with `_GRP` string.
+ """
+
+ label = "Validate Assembly Name"
+ order = pyblish.api.ValidatorOrder
+ families = ["assembly"]
+ actions = [pype.maya.action.SelectInvalidAction]
+ active = False
+
+ @classmethod
+ def get_invalid(cls, instance):
+ cls.log.info("Checking name of {}".format(instance.name))
+
+ content_instance = instance.data.get("setMembers", None)
+ if not content_instance:
+ cls.log.error("Instance has no nodes!")
+ return True
+
+ # All children will be included in the extracted export so we also
+ # validate *all* descendents of the set members and we skip any
+ # intermediate shapes
+ descendants = cmds.listRelatives(content_instance,
+ allDescendents=True,
+ fullPath=True) or []
+ descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+ content_instance = list(set(content_instance + descendants))
+ assemblies = cmds.ls(content_instance, assemblies=True, long=True)
+
+ invalid = []
+ for cr in assemblies:
+ if not cr.endswith('_GRP'):
+ cls.log.error("{} doesn't end with _GRP".format(cr))
+ invalid.append(cr)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid is True:
+ raise RuntimeError("Instance has no nodes!")
+ if invalid:
+ raise RuntimeError("Found {} invalidly named assembly "
+ "items".format(len(invalid)))
diff --git a/pype/plugins/maya/publish/validate_model_name.py b/pype/plugins/maya/publish/validate_model_name.py
new file mode 100644
index 0000000000..89c629c5a4
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_model_name.py
@@ -0,0 +1,98 @@
+from maya import cmds
+import pyblish.api
+import pype.api
+import pype.maya.action
+import re
+
+
+class ValidateModelName(pyblish.api.InstancePlugin):
+ """Validate name of model
+
+ starts with (somename)_###_(materialID)_GEO
+ materialID must be present in list
+ padding number doesn't have limit
+
+ """
+ optional = True
+ order = pype.api.ValidateContentsOrder
+ hosts = ["maya"]
+ families = ["model"]
+ label = "Model Name"
+ actions = [pype.maya.action.SelectInvalidAction]
+ # path to shader names definitions
+ # TODO: move it to preset file
+ material_file = None
+ active = False
+ regex = '(.*)_(\\d)*_(.*)_(GEO)'
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ # find out if supplied transform is group or not
+ def is_group(groupName):
+ try:
+ children = cmds.listRelatives(groupName, children=True)
+ for child in children:
+ if not cmds.ls(child, transforms=True):
+ return False
+ return True
+ except Exception:
+ return False
+
+ invalid = []
+ content_instance = instance.data.get("setMembers", None)
+ if not content_instance:
+ cls.log.error("Instance has no nodes!")
+ return True
+ descendants = cmds.listRelatives(content_instance,
+ allDescendents=True,
+ fullPath=True) or []
+
+ descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+ trns = cmds.ls(descendants, long=False, type=('transform'))
+
+ # filter out groups
+ filtered = [node for node in trns if not is_group(node)]
+
+ # load shader list file as utf-8
+ shaders = []
+ if cls.material_file:
+ with open(cls.material_file, "r") as shader_file:
+ shaders = shader_file.readlines()
+
+ # strip line endings from list (keep a list, not a map iterator,
+ # so membership can be tested repeatedly below)
+ shaders = [s.rstrip() for s in shaders]
+
+ # compile regex for testing names
+ r = re.compile(cls.regex)
+
+ for obj in filtered:
+ m = r.match(obj)
+ if m is None:
+ cls.log.error("invalid name on: {}".format(obj))
+ invalid.append(obj)
+ else:
+ # if we have shader files and shader named group is in
+ # regex, test this group against names in shader file
+ if 'shader' in r.groupindex and shaders:
+ try:
+ if not m.group('shader') in shaders:
+ cls.log.error(
+ "invalid materialID on: {0} ({1})".format(
+ obj, m.group('shader')))
+ invalid.append(obj)
+ except IndexError:
+ # shader named group doesn't match
+ cls.log.error(
+ "shader group doesn't match: {}".format(obj))
+ invalid.append(obj)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise RuntimeError("Model naming is invalid. See log.")
diff --git a/pype/plugins/maya/publish/validate_shader_name.py b/pype/plugins/maya/publish/validate_shader_name.py
new file mode 100644
index 0000000000..c6f72a2940
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_shader_name.py
@@ -0,0 +1,78 @@
+from maya import cmds
+
+import pyblish.api
+import pype.api
+import pype.maya.action
+import re
+
+
+class ValidateShaderName(pyblish.api.InstancePlugin):
+ """Validate shader name assigned.
+
+ It should be <asset>_<*>_SHD
+
+ """
+ optional = True
+ active = False
+ order = pype.api.ValidateContentsOrder
+ families = ["look"]
+ hosts = ['maya']
+ label = 'Validate Shaders Name'
+ actions = [pype.maya.action.SelectInvalidAction]
+ regex = r'(?P<asset>.*)_(.*)_SHD'
+
+ # The default connections to check
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Found shapes with invalid shader names "
+ "assigned: "
+ "\n{}".format(invalid))
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ # Get all shapes from the instance
+ content_instance = instance.data.get("setMembers", None)
+ if not content_instance:
+ cls.log.error("Instance has no nodes!")
+ return True
+ descendants = cmds.listRelatives(content_instance,
+ allDescendents=True,
+ fullPath=True) or []
+
+ descendants = cmds.ls(descendants, noIntermediate=True, long=True)
+ shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
+ asset_name = instance.data.get("asset", None)
+
+ # Check the number of connected shadingEngines per shape
+ r = re.compile(cls.regex)
+ for shape in shapes:
+ shading_engines = cmds.listConnections(shape,
+ destination=True,
+ type="shadingEngine") or []
+ shaders = cmds.ls(
+ cmds.listConnections(shading_engines), materials=1
+ )
+
+ for shader in shaders:
+ m = r.match(shader)
+ if m is None:
+ invalid.append(shape)
+ cls.log.error(
+ "object {0} has invalid shader name {1}".format(shape,
+ shader)
+ )
+ else:
+ if 'asset' in r.groupindex:
+ if m.group('asset') != asset_name:
+ invalid.append(shape)
+ cls.log.error(("object {0} has invalid "
+ "shader name {1}").format(shape,
+ shader))
+
+ return invalid
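With the named group restored in the pattern, the asset prefix can be extracted and compared to `instance.data["asset"]`, which is what the loop above does. An illustrative check:

```python
import re

r = re.compile(r'(?P<asset>.*)_(.*)_SHD')

m = r.match("hero_chrome_SHD")
assert m and m.group("asset") == "hero"
```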
diff --git a/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py b/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py
index d6d03e9722..c32df636e1 100644
--- a/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py
+++ b/pype/plugins/premiere/publish/integrate_hierarchy_ftrack.py
@@ -1,4 +1,6 @@
+import sys
import pyblish.api
+import six
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
@@ -91,7 +93,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
@@ -114,7 +121,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
task_type=ftrack_types[task],
parent=entity
)
- self.session.commit()
if 'childs' in entity_data:
self.import_to_ftrack(
@@ -141,7 +147,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
return task
@@ -150,6 +161,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
'name': name,
'parent': parent
})
- self.session.commit()
+ try:
+ self.session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ self.session.rollback()
+ six.reraise(tp, value, tb)
return entity
diff --git a/pype/plugins/premiere/publish/validate_auto_sync_off.py b/pype/plugins/premiere/publish/validate_auto_sync_off.py
index 3b46a682ab..b6429cfa05 100644
--- a/pype/plugins/premiere/publish/validate_auto_sync_off.py
+++ b/pype/plugins/premiere/publish/validate_auto_sync_off.py
@@ -1,6 +1,8 @@
+import sys
import pyblish.api
import pype.api
import avalon.api
+import six
class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
@@ -48,4 +50,9 @@ class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
session = context.data["ftrackSession"]
invalid = cls.get_invalid(context)
invalid['custom_attributes']['avalon_auto_sync'] = False
- session.commit()
+ try:
+ session.commit()
+ except Exception:
+ tp, value, tb = sys.exc_info()
+ session.rollback()
+ six.reraise(tp, value, tb)