diff --git a/pype/ftrack/actions/action_createCustomAttributes.py b/pype/ftrack/actions/action_createCustomAttributes.py index 5f9dbd7381..d45a92a01d 100644 --- a/pype/ftrack/actions/action_createCustomAttributes.py +++ b/pype/ftrack/actions/action_createCustomAttributes.py @@ -24,14 +24,18 @@ class AvalonIdAttribute(BaseAction): def discover(self, session, entities, event): - ''' Validation ''' + ''' + Validation + - action is only for Administrators + ''' + success = False + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + for role in user['user_security_roles']: + if role['security_role']['name'] == 'Administrator': + success = True - # userId = event['source']['user']['id'] - # user = session.query('User where id is ' + userId).one() - # if user['user_security_roles'][0]['security_role']['name'] != 'Administrator': - # return False - - return True + return success def launch(self, session, entities, event): @@ -49,13 +53,21 @@ class AvalonIdAttribute(BaseAction): }) session.commit() try: + # Checkbox for event sync + cbxSyncName = 'avalon_auto_sync' + cbxSyncLabel = 'Avalon auto-sync' + cbxSyncExist = False + # Attribute Name and Label custAttrName = 'avalon_mongo_id' custAttrLabel = 'Avalon/Mongo Id' + + attrs_update = set() # Types that don't need object_type_id base = {'show'} + # Don't create custom attribute on these entity types: - exceptions = ['task','milestone','library'] + exceptions = ['task', 'milestone'] exceptions.extend(base) # Get all possible object types all_obj_types = session.query('ObjectType').all() @@ -73,6 +85,7 @@ class AvalonIdAttribute(BaseAction): # Get IDs of filtered object types all_obj_types_id = set() + for obj in all_obj_types: all_obj_types_id.add(obj['id']) @@ -80,20 +93,60 @@ class AvalonIdAttribute(BaseAction): current_cust_attr = session.query('CustomAttributeConfiguration').all() # Filter already existing AvalonMongoID attr. 
for attr in current_cust_attr: + if attr['key'] == cbxSyncName: + cbxSyncExist = True + cbxAttribute = attr if attr['key'] == custAttrName: if attr['entity_type'] in base: base.remove(attr['entity_type']) + attrs_update.add(attr) if attr['object_type_id'] in all_obj_types_id: all_obj_types_id.remove(attr['object_type_id']) + attrs_update.add(attr) # Set session back to begin("session.query" raises error on commit) session.rollback() # Set security roles for attribute - custAttrSecuRole = session.query('SecurityRole').all() + role_api = session.query('SecurityRole where name is "API"').one() + role_admin = session.query('SecurityRole where name is "Administrator"').one() + roles = [role_api,role_admin] + # Set Text type of Attribute custom_attribute_type = session.query( 'CustomAttributeType where name is "text"' ).one() + # Get/Set 'avalon' group + groups = session.query('CustomAttributeGroup where name is "avalon"').all() + if len(groups) > 1: + msg = "There are more Custom attribute groups with name 'avalon'" + self.log.warning(msg) + return { 'success': False, 'message':msg } + + elif len(groups) < 1: + group = session.create('CustomAttributeGroup', { + 'name': 'avalon', + }) + session.commit() + else: + group = groups[0] + + # Checkbox for auto-sync event / Create or Update(roles + group) + if cbxSyncExist is False: + cbxType = session.query('CustomAttributeType where name is "boolean"').first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'show', + 'type': cbxType, + 'label': cbxSyncLabel, + 'key': cbxSyncName, + 'default': False, + 'write_security_roles': roles, + 'read_security_roles': roles, + 'group':group, + }) + else: + cbxAttribute['write_security_roles'] = roles + cbxAttribute['read_security_roles'] = roles + cbxAttribute['group'] = group for entity_type in base: # Create a custom attribute configuration. 
@@ -103,8 +156,9 @@ class AvalonIdAttribute(BaseAction):
                     'label': custAttrLabel,
                     'key': custAttrName,
                     'default': '',
-                    'write_security_roles': custAttrSecuRole,
-                    'read_security_roles': custAttrSecuRole,
+                    'write_security_roles': roles,
+                    'read_security_roles': roles,
+                    'group': group,
                     'config': json.dumps({'markdown': False})
                 })

@@ -117,16 +171,24 @@ class AvalonIdAttribute(BaseAction):
                     'label': custAttrLabel,
                     'key': custAttrName,
                     'default': '',
-                    'write_security_roles': custAttrSecuRole,
-                    'read_security_roles': custAttrSecuRole,
+                    'write_security_roles': roles,
+                    'read_security_roles': roles,
+                    'group': group,
                     'config': json.dumps({'markdown': False})
                 })

+            for attr in attrs_update:
+                attr['write_security_roles'] = roles
+                attr['read_security_roles'] = roles
+                attr['group'] = group
+
             job['status'] = 'done'
             session.commit()
         except Exception as e:
+            session.rollback()
             job['status'] = 'failed'
+            session.commit()
             self.log.error("Creating custom attributes failed ({})".format(e))

         return True

diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py
index 5af84b3042..c354f2332d 100644
--- a/pype/ftrack/actions/action_syncToAvalon.py
+++ b/pype/ftrack/actions/action_syncToAvalon.py
@@ -1,5 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2017 ftrack
 import sys
 import argparse
 import logging
@@ -7,13 +5,47 @@ import os
 import ftrack_api
 import json
 import re
+from pype import lib
 from ftrack_action_handler import BaseAction
+from bson.objectid import ObjectId
+from avalon import io, inventory

-from avalon import io, inventory, lib
-from avalon.vendor import toml
+from pype.ftrack import ftrack_utils


 class SyncToAvalon(BaseAction):
-    '''Edit meta data action.'''
+    '''
+    Synchronize data from Ftrack to the Avalon DB

+    Stores all information about an entity:
+    - Name (string) - the most important information = identifier of the entity
+    - Parent (ObjectId) - Avalon project id, if the entity is not the project itself
+    - Silo (string) - the last parent except the project
+    - Data (dictionary):
+        - VisualParent (ObjectId) - Avalon id of the parent asset
+        - Parents (array of string) - all parent names except the project
+        - Tasks (array of string) - tasks on the asset
+        - FtrackId (string)
+        - entityType (string) - the entity's type in Ftrack
+        * plus all custom attributes in group 'avalon' whose names don't start with 'avalon_'
+
+    * The same information is stored for all parent and child entities.
+
+    The Avalon id of an asset is stored in Ftrack in the custom attribute 'avalon_mongo_id'.
+    - this action DOES NOT create that custom attribute if it doesn't exist
+        - run the 'Create Custom Attributes' action or create it manually (not recommended)
+
+    If the Ftrack entity already has an id stored in 'avalon_mongo_id':
+    - name, parents and silo are checked -> an error is shown if they are not exactly the same
+        - after a sync it is not allowed to rename or move entities
+
+    If the id in 'avalon_mongo_id' is an empty string or is not found in the DB:
+    - tries to find the entity by name
+        - found:
+            - raises an error if ftrackId/visual parent/parents are not the same
+        - not found:
+            - creates the asset/project
+
+    '''

     #: Action identifier.
identifier = 'sync.to.avalon' @@ -27,11 +59,20 @@ class SyncToAvalon(BaseAction): def discover(self, session, entities, event): ''' Validation ''' + roleCheck = False discover = False - for entity in entities: - if entity.entity_type.lower() not in ['task', 'assetversion']: - discover = True - break + roleList = ['Administrator', 'Project Manager'] + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + for role in user['user_security_roles']: + if role['security_role']['name'] in roleList: + roleCheck = True + if roleCheck is True: + for entity in entities: + if entity.entity_type.lower() not in ['task', 'assetversion']: + discover = True + break return discover @@ -52,10 +93,10 @@ class SyncToAvalon(BaseAction): }) try: - self.log.info("action <" + self.__class__.__name__ + "> is running") - + self.log.info("Action <" + self.__class__.__name__ + "> is running") + self.ca_mongoid = 'avalon_mongo_id' #TODO AVALON_PROJECTS, AVALON_ASSET, AVALON_SILO should be set up otherwise console log shows avalon debug - self.setAvalonAttributes(session) + self.setAvalonAttributes() self.importable = [] # get from top entity in hierarchy all parent entities @@ -69,34 +110,58 @@ class SyncToAvalon(BaseAction): for entity in entities: self.getShotAsset(entity) - # Check duplicate name - raise error if found - all_names = {} + # Check names: REGEX in schema/duplicates - raise error if found + all_names = [] duplicates = [] for e in self.importable: - name = self.checkName(e['name']) - if name in all_names: - duplicates.append("'{}'-'{}'".format(all_names[name], e['name'])) + ftrack_utils.avalon_check_name(e) + if e['name'] in all_names: + duplicates.append("'{}'".format(e['name'])) else: - all_names[name] = e['name'] + all_names.append(e['name']) if len(duplicates) > 0: - raise ValueError("Unable to sync: Entity name duplication: {}".format(", ".join(duplicates))) + raise ValueError("Entity name duplication: {}".format(", ".join(duplicates))) + + ## ----- PROJECT ------ + # store Ftrack project- self.importable[0] must be project entity!!! + self.entityProj = self.importable[0] + # set AVALON_ env + os.environ["AVALON_PROJECT"] = self.entityProj["full_name"] + os.environ["AVALON_ASSET"] = self.entityProj["full_name"] + + self.avalon_project = None + + io.install() # Import all entities to Avalon DB for e in self.importable: self.importToAvalon(session, e) + io.uninstall() + job['status'] = 'done' session.commit() self.log.info('Synchronization to Avalon was successfull!') + except ValueError as ve: + job['status'] = 'failed' + session.commit() + message = str(ve) + self.log.error('Error during syncToAvalon: {}'.format(message)) + except Exception as e: job['status'] = 'failed' - message = str(e) - self.log.error('During synchronization to Avalon went something wrong! 
({})'.format(message)) + session.commit() + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno) + self.log.error('Error during syncToAvalon: {}'.format(log_message)) + message = 'Unexpected Error - Please check Log for more information' if len(message) > 0: + message = "Unable to sync: {}".format(message) return { 'success': False, 'message': message @@ -106,9 +171,10 @@ class SyncToAvalon(BaseAction): 'success': True, 'message': "Synchronization was successfull" } - def setAvalonAttributes(self, session): + + def setAvalonAttributes(self): self.custom_attributes = [] - all_avalon_attr = session.query('CustomAttributeGroup where name is "avalon"').one() + all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one() for cust_attr in all_avalon_attr['custom_attribute_configurations']: if 'avalon_' not in cust_attr['key']: self.custom_attributes.append(cust_attr) @@ -123,182 +189,116 @@ class SyncToAvalon(BaseAction): for child in childrens: self.getShotAsset(child) - def checkName(self, input_name): - if input_name.find(" ") == -1: - name = input_name - else: - name = input_name.replace(" ", "-") - self.log.info("Name of {} was changed to {}".format(input_name, name)) - return name - - def getConfig(self, entity): - apps = [] - for app in entity['custom_attributes']['applications']: - try: - label = toml.load(lib.which_app(app))['label'] - apps.append({'name':app, 'label':label}) - except Exception as e: - self.log.error('Error with application {0} - {1}'.format(app, e)) - - config = { - 'schema': 'avalon-core:config-1.0', - 'tasks': [{'name': ''}], - 'apps': apps, - # TODO redo work!!! - 'template': { - 'workfile': '{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>', - 'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}', - 'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'} - } - return config - - def importToAvalon(self, session, entity): - eLinks = [] - - ca_mongoid = 'avalon_mongo_id' - - # get needed info of entity and all parents - for e in entity['link']: - tmp = session.get(e['type'], e['id']) - eLinks.append(tmp) - - entityProj = eLinks[0] - - # set AVALON_PROJECT env - os.environ["AVALON_PROJECT"] = entityProj["full_name"] - os.environ["AVALON_ASSET"] = entityProj['full_name'] - - # Set project template - template = {"schema": "avalon-core:inventory-1.0"} - # --- Begin: PUSH TO Avalon --- - io.install() - ## ----- PROJECT ------ - # If project don't exists -> ELSE - avalon_project = io.find_one({"type": "project", "name": entityProj["full_name"]}) + entity_type = entity.entity_type - data = {} - data['ftrackId'] = entity['id'] - data['entityType'] = entity_type - - for cust_attr in self.custom_attributes: - if cust_attr['entity_type'].lower() in ['asset']: - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] - - elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']] - - elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': - # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build')
-                entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
-                # Get object id of entity type
-                ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id']
-
-                if cust_attr['object_type_id'] == ent_obj_type_id:
-                    data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
-
-
-        if entity.entity_type.lower() in ['project']:
+        if entity_type.lower() in ['project']:
             # Set project Config
-            config = self.getConfig(entity)
+            config = ftrack_utils.get_config(entity)
+            # Set project template
+            template = lib.get_avalon_project_template_schema()

+            if self.ca_mongoid in entity['custom_attributes']:
+                try:
+                    projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid])
+                    self.avalon_project = io.find_one({"_id": projectId})
+                except Exception:
+                    self.log.debug("Entity {} doesn't have a stored entity id in Ftrack".format(entity['name']))

-            if avalon_project is None:
-                inventory.save(entityProj['full_name'], config, template)
-            else:
-                io.update_many({'type': 'project','name': entityProj['full_name']},
-                    {'$set':{'config':config}})
+            if self.avalon_project is None:
+                self.avalon_project = io.find_one({
+                    "type": "project",
+                    "name": entity["full_name"]
+                })
+                if self.avalon_project is None:
+                    inventory.save(entity['full_name'], config, template)
+                    self.avalon_project = io.find_one({
+                        "type": "project",
+                        "name": entity["full_name"]
+                    })

-            data['code'] = entity['name']
+            elif self.avalon_project['name'] != entity['full_name']:
+                raise ValueError('You can\'t change the project name from {} to {}; the Avalon DB won\'t work properly!'.format(self.avalon_project['name'], entity['full_name']))
+
+            data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)

             # Store info about project (FtrackId)
             io.update_many({
                 'type': 'project',
-                'name': entity['full_name']},
-                {'$set':{'data':data}})
+                'name': entity['full_name']
+            }, {
+                '$set':{'data':data, 'config':config}
+            })

-            projectId = io.find_one({"type": "project", "name": entityProj["full_name"]})["_id"]
-            if ca_mongoid in entity['custom_attributes']:
-                entity['custom_attributes'][ca_mongoid] = str(projectId)
+            self.projectId = self.avalon_project["_id"]
+            if self.ca_mongoid in entity['custom_attributes']:
+                entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
             else:
                 self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))

-            io.uninstall()
-
             return

-        # Store project Id
-        projectId = avalon_project["_id"]
-
         ## ----- ASSETS ------
         # Presets:
-        # TODO how to check if entity is Asset Library or AssetBuild? 
-        if entity.entity_type in ['AssetBuild', 'Library']:
-            silo = 'Assets'
+        data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
+
+        # Return if the entity is a silo
+        if len(data['parents']) == 0:
+            return
         else:
-            silo = 'Film'
+            silo = data['parents'][0]

         os.environ['AVALON_SILO'] = silo

-        # Get list of parents without project
-        parents = []
-        for i in range(1, len(eLinks)-1):
-            parents.append(eLinks[i])
-
-        # Get info for 'Data' in Avalon DB
-        tasks = []
-        for child in entity['children']:
-            if child.entity_type in ['Task']:
-                tasks.append(child['name'])
-
-        folderStruct = []
-        parentId = None
-
-        for parent in parents:
-            name = self.checkName(parent['name'])
-            folderStruct.append(name)
-            parentId = io.find_one({'type': 'asset', 'name': name})['_id']
-            if parent['parent'].entity_type != 'project' and parentId is None:
-                self.importToAvalon(parent)
-                parentId = io.find_one({'type': 'asset', 'name': name})['_id']
-
-        hierarchy = os.path.sep.join(folderStruct)
-
-        data['visualParent'] = parentId
-        data['parents'] = folderStruct
-        data['tasks'] = tasks
-        data['hierarchy'] = hierarchy
-
-
-        name = self.checkName(entity['name'])
+        name = entity['name']
         os.environ['AVALON_ASSET'] = name
+
         # Try to find asset in current database
-        avalon_asset = io.find_one({'type': 'asset', 'name': name})
-        # Create if don't exists
+        avalon_asset = None
+        if self.ca_mongoid in entity['custom_attributes']:
+            try:
+                entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid])
+                avalon_asset = io.find_one({"_id": entityId})
+            except Exception:
+                self.log.debug("Entity {} doesn't have a stored entity id in Ftrack".format(entity['name']))
+
         if avalon_asset is None:
-            inventory.create_asset(name, silo, data, projectId)
-            self.log.debug("Asset {} - created".format(name))
-        # Raise error if it seems to be different ent. with same name
+            avalon_asset = io.find_one({'type': 'asset', 'name': name})
+            # Create if it doesn't exist
+            if avalon_asset is None:
+                inventory.create_asset(name, silo, data, self.projectId)
+                self.log.debug("Asset {} - created".format(name))

-        elif (avalon_asset['data']['ftrackId'] != data['ftrackId'] or
-            avalon_asset['data']['visualParent'] != data['visualParent'] or
-            avalon_asset['data']['parents'] != data['parents']):
-            raise ValueError('Entity <{}> is not same'.format(name))
-        # Else update info
-        else:
-            io.update_many({'type': 'asset','name': name},
-                {'$set':{'data':data, 'silo': silo}})
-            # TODO check if is asset in same folder!!! ???? FEATURE FOR FUTURE
-            self.log.debug("Asset {} - updated".format(name))
+            # Raise error if it seems to be a different entity with the same name
+            else:
+                aD = avalon_asset['data']
+                # check_attr = ['parents', 'ftrackId', 'visualParent']
+                if (avalon_asset['data']['parents'] != data['parents'] or
+                    avalon_asset['silo'] != silo):
+                    raise ValueError('An entity with the name "{0}" already exists in the Avalon DB'.format(name))
+        elif avalon_asset['name'] != entity['name']:
+            raise ValueError('You can\'t change the name from {} to {}; the Avalon DB won\'t work properly - please create a new asset'.format(avalon_asset['name'], name))
+        elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
+            old_path = "/".join(avalon_asset['data']['parents'])
+            new_path = "/".join(data['parents'])
+            raise ValueError('You can\'t move entities. 
Entity "{}" was moved from "{}" to "{}" '.format(avalon_asset['name'], old_path, new_path)) + + # Update info + io.update_many({'type': 'asset','name': name}, + {'$set':{'data':data, 'silo': silo}}) + + self.log.debug("Asset {} - updated".format(name)) + + entityId = io.find_one({'type': 'asset', 'name': name})['_id'] ## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK # Set custom attribute to avalon/mongo id of entity (parentID is last) - if ca_mongoid in entity['custom_attributes']: - entity['custom_attributes'][ca_mongoid] = str(parentId) + if self.ca_mongoid in entity['custom_attributes']: + entity['custom_attributes'][self.ca_mongoid] = str(entityId) else: self.log.error("Custom attribute for <{}> is not created.".format(entity['name'])) - io.uninstall() session.commit() diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index ba67912c9f..15c57dbb1c 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -1,6 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2017 ftrack import os +import sys import logging import getpass import platform @@ -73,6 +74,8 @@ class AppAction(object): ) self.log.info("Application '{}' - Registered successfully".format(self.label)) + self.log.info("Application '{}' - Registered successfully".format(self.label)) + def _discover(self, event): args = self._translate_event( self.session, event @@ -299,20 +302,71 @@ class AppAction(object): # Full path to executable launcher execfile = None - for ext in os.environ["PATHEXT"].split(os.pathsep): - fpath = os.path.join(path.strip('"'), self.executable + ext) - if os.path.isfile(fpath) and os.access(fpath, os.X_OK): - execfile = fpath - break + if sys.platform == "win32": + + for ext in os.environ["PATHEXT"].split(os.pathsep): + fpath = os.path.join(path.strip('"'), self.executable + ext) + if os.path.isfile(fpath) and os.access(fpath, os.X_OK): + execfile = fpath + break + pass + + # Run SW if was found executable + if execfile is not None: + lib.launch(executable=execfile, args=[], environment=env) + else: + return { + 'success': False, + 'message': "We didn't found launcher for {0}" + .format(self.label) + } + pass + + if sys.platform.startswith('linux'): + execfile = os.path.join(path.strip('"'), self.executable) + if os.path.isfile(execfile): + try: + fp = open(execfile) + except PermissionError as p: + log.error('Access denied on {0} - {1}'. + format(execfile, p)) + return { + 'success': False, + 'message': "Access denied on launcher - {}". + format(execfile) + } + fp.close() + # check executable permission + if not os.access(execfile, os.X_OK): + log.error('No executable permission on {}'. + format(execfile)) + return { + 'success': False, + 'message': "No executable permission - {}" + .format(execfile) + } + pass + else: + log.error('Launcher doesn\'t exist - {}'. 
+                               format(execfile))
+                return {
+                    'success': False,
+                    'message': "Launcher doesn't exist - {}"
+                    .format(execfile)
+                }
+
+            # Run the application if an executable was found
+            if execfile is not None:
+                lib.launch('/usr/bin/env', args=['bash', execfile], environment=env)
+            else:
+                return {
+                    'success': False,
+                    'message': "We didn't find a launcher for {0}"
+                    .format(self.label)
+                }
+
-        # Run SW if was found executable
-        if execfile is not None:
-            lib.launch(executable=execfile, args=[], environment=env)
-        else:
-            return {
-                'success': False,
-                'message': "We didn't found launcher for {0}".format(self.label)
-            }

         # RUN TIMER IN FTRACK
         username = event['source']['user']['username']
@@ -437,8 +491,8 @@ class BaseAction(object):
             ),
             self._launch
         )
-        self.log.info("----- action - <" + self.__class__.__name__ +
-                      "> - Has been registered -----")
+
+        self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__))

     def _discover(self, event):
         args = self._translate_event(
diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py
index 23531a9fdd..66d739829b 100644
--- a/pype/ftrack/ftrack_utils.py
+++ b/pype/ftrack/ftrack_utils.py
@@ -1,14 +1,145 @@
-# fttrack help functions
-
-import ftrack_api
 import os
+import sys
+import re
 from pprint import *
+import ftrack_api
+from pype import lib
+import avalon.io as io
+import avalon.api
+import avalon
+from avalon.vendor import toml, jsonschema
+from app.api import Logger

-def checkLogin():
-    # check Environments FTRACK_API_USER, FTRACK_API_KEY
-    pass
+log = Logger.getLogger(__name__)

+def get_data(parent, entity, session, custom_attributes):
+    entity_type = entity.entity_type
+
+    data = {}
+    data['ftrackId'] = entity['id']
+    data['entityType'] = entity_type
+
+    for cust_attr in custom_attributes:
+        key = cust_attr['key']
+        if cust_attr['entity_type'].lower() in ['asset']:
+            data[key] = entity['custom_attributes'][key]
+
+        elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project':
+            data[key] = entity['custom_attributes'][key]
+
+        elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project':
+            # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build')
+            entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
+            # Get object id of entity type
+            ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id']
+
+            if cust_attr['object_type_id'] == ent_obj_type_id:
+                data[key] = entity['custom_attributes'][key]
+
+    if entity_type in ['Project']:
+        data['code'] = entity['name']
+        return data
+
+    # Get info for 'Data' in Avalon DB
+    tasks = []
+    for child in entity['children']:
+        if child.entity_type in ['Task']:
+            tasks.append(child['name'])
+
+    # Get list of parents without project
+    parents = []
+    folderStruct = []
+    for i in range(1, len(entity['link'])-1):
+        parEnt = session.get(entity['link'][i]['type'], entity['link'][i]['id'])
+        parName = parEnt['name']
+        folderStruct.append(parName)
+        if i > 1:
+            parents.append(parEnt)
+
+    parentId = None
+
+    for parent_ent in parents:
+        par_name = parent_ent['name']
+        avalon_parent = io.find_one({'type': 'asset', 'name': par_name})
+        if avalon_parent is None and parent_ent['parent'].entity_type != 'project':
+            # 'parent' is the calling action (first argument of get_data)
+            parent.importToAvalon(session, parent_ent)
+            avalon_parent = io.find_one({'type': 'asset', 'name': par_name})
+        if avalon_parent is not None:
+            parentId = avalon_parent['_id']
+
+    hierarchy = os.path.sep.join(folderStruct)
+
+    data['visualParent'] = parentId
+    data['parents'] = folderStruct
+    data['tasks'] = tasks
+    data['hierarchy'] = hierarchy
+
+    return data
+
+def avalon_check_name(entity, inSchema=None):
+    ValidationError = jsonschema.ValidationError
+    alright = True
+    name = entity['name']
+    if " " in name:
+        alright = False
+
+    data = {}
+    data['data'] = {}
+    data['type'] = 'asset'
+    schema = "avalon-core:asset-2.0"
+    # TODO does the project have any REGEX check?
+    if entity.entity_type in ['Project']:
+        # data['type'] = 'project'
+        name = entity['full_name']
+        # schema = get_avalon_project_template_schema()['schema']
+    # elif entity.entity_type in ['AssetBuild','Library']:
+    #     data['silo'] = 'Assets'
+    # else:
+    #     data['silo'] = 'Film'
+    data['silo'] = 'Film'
+
+    if inSchema is not None:
+        schema = inSchema
+    data['schema'] = schema
+    data['name'] = name
+    try:
+        avalon.schema.validate(data)
+    except ValidationError:
+        alright = False
+
+    if alright is False:
+        raise ValueError('Name "{}" contains unsupported characters (e.g. spaces) or does not match the avalon naming schema'.format(name))
+
+
+def get_apps(entity):
+    """ Get apps from a project.
+
+    Requirements:
+        'entity' MUST be a ftrack entity with entity_type 'Project'
+    Checks whether each app from ftrack is available in Templates/bin/{app_name}.toml
+
+    Returns:
+        list of dictionaries with app 'name' and 'label'
+    """
+    apps = []
+    for app in entity['custom_attributes']['applications']:
+        try:
+            app_config = {}
+            app_config['name'] = app
+            app_config['label'] = toml.load(avalon.lib.which_app(app))['label']
+
+            apps.append(app_config)

+        except Exception as e:
+            log.warning('Error with application {0} - {1}'.format(app, e))
+    return apps
+
+def get_config(entity):
+    config = {}
+    config['schema'] = lib.get_avalon_project_config_schema()
+    config['tasks'] = [{'name': ''}]
+    config['apps'] = get_apps(entity)
+    config['template'] = lib.get_avalon_project_template()
+
+    return config

 def checkRegex():
     # _handle_result -> would be solution? 
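Note: a minimal usage sketch of the new ftrack_utils helpers follows. It is illustrative only and not part of the patch; sync_one_entity and its arguments are hypothetical names, and it assumes a connected ftrack session plus an action that has already run setAvalonAttributes() and io.install():

    # Illustrative sketch - not part of the patch.
    from pype.ftrack import ftrack_utils

    def sync_one_entity(action, session, entity):
        # Raises ValueError when the name contains spaces or fails schema validation
        ftrack_utils.avalon_check_name(entity)

        # data carries ftrackId, entityType, tasks, parents, hierarchy, visualParent, ...
        data = ftrack_utils.get_data(action, entity, session,
                                     action.custom_attributes)

        if entity.entity_type == 'Project':
            # Project config: schema, tasks, apps and path templates
            config = ftrack_utils.get_config(entity)
            return data, config

        return data, None
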
diff --git a/pype/lib.py b/pype/lib.py index 3ce1441e3d..b0f427aeb6 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -9,6 +9,7 @@ from .vendor.pather.error import ParseError import avalon.io as io import avalon.api +import avalon log = logging.getLogger(__name__) @@ -335,3 +336,31 @@ def get_asset_data(asset=None): data = document.get("data", {}) return data + +def get_avalon_project_config_schema(): + schema = 'avalon-core:config-1.0' + return schema + +def get_avalon_project_template_schema(): + schema = {"schema": "avalon-core:inventory-1.0"} + return schema + +def get_avalon_project_template(): + from app.api import Templates + + """Get avalon template + + Returns: + dictionary with templates + """ + template = Templates(type=["anatomy"]) + proj_template = {} + proj_template['workfile'] = '{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>' + proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}' + proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}' + # TODO this down should work but it can't be in default.toml: + # - Raises error when App (e.g. Nuke) is started + # proj_template['workfile'] = template.anatomy.avalon.workfile + # proj_template['work'] = template.anatomy.avalon.work + # proj_template['publish'] = template.anatomy.avalon.publish + return proj_template diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py index 177ced5ddb..441dd6f88d 100644 --- a/pype/plugins/ftrack/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -1,5 +1,6 @@ import pyblish.api import os +import clique class IntegrateFtrackInstance(pyblish.api.InstancePlugin): @@ -21,7 +22,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): 'setdress': 'setdress', 'pointcache': 'cache', 'review': 'mov', - 'write': 'img'} + 'write': 'img', + 'render': 'render'} def process(self, instance): @@ -37,20 +39,25 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): componentList = [] - transfers = instance.data["transfers"] + dst_list = instance.data['destination_list'] ft_session = instance.context.data["ftrackSession"] location = ft_session.query( 'Location where name is "ftrack.unmanaged"').one() self.log.debug('location {}'.format(location)) - for src, dest in transfers: - filename, ext = os.path.splitext(src) - self.log.debug('source filename: ' + filename) - self.log.debug('source ext: ' + ext) + for file in instance.data['destination_list']: + self.log.debug('file {}'.format(file)) + + for file in dst_list: + filename, ext = os.path.splitext(file) + self.log.debug('dest ext: ' + ext) componentList.append({"assettype_data": { "short": asset_type, + }, + "asset_data": { + "name": instance.data["subset"], }, "assetversion_data": { "version": version_number, @@ -58,7 +65,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): "component_data": { "name": ext[1:], # Default component name is "main". 
}, - "component_path": dest, + "component_path": file, 'component_location': location, "component_overwrite": False, } diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index e20f59133c..698eb907e9 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -82,8 +82,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", "name": ASSET, @@ -136,12 +135,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - # template_data = {"root": root, - # "project": PROJECT, - # "silo": asset['silo'], - # "asset": ASSET, - # "subset": subset["name"], - # "version": version["name"]} hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] if hierarchy: # hierarchy = os.path.sep.join(hierarchy) @@ -149,7 +142,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): template_data = {"root": root, "project": {"name": PROJECT, - "code": "prjX"}, + "code": project['data']['code']}, "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], @@ -163,6 +156,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): # Find the representations to transfer amongst the files # Each should be a single representation (as such, a single extension) representations = [] + destination_list = [] for files in instance.data["files"]: @@ -195,6 +189,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): dst = anatomy_filled.publish.path instance.data["transfers"].append([src, dst]) + template = anatomy.publish.path else: # Single file @@ -218,13 +213,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin): dst = anatomy_filled.publish.path instance.data["transfers"].append([src, dst]) + template = anatomy.publish.path representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': dst}, + "data": {'path': dst, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context @@ -232,7 +228,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "context": { "root": root, "project": PROJECT, - "projectcode": "prjX", + "projectcode": project['data']['code'], 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, @@ -243,6 +239,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "representation": ext[1:] } } + + destination_list.append(dst) + instance.data['destination_list'] = destination_list representations.append(representation) self.log.info("Registering {} items".format(len(representations))) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 5d64c60252..91f4fcaac8 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -24,16 +24,14 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): except Exception: continue - try: - publish = node.knob("publish").value() - except Exception: - continue - # get data from avalon knob avalon_knob_data = get_avalon_knob_data(node) if not avalon_knob_data: continue + if avalon_knob_data["id"] != "pyblish.avalon.instance": + continue + subset = avalon_knob_data.get("subset", None) or node["name"].value() # Create 
instance @@ -45,8 +43,12 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "asset": os.environ["AVALON_ASSET"], "label": node.name(), "name": node.name(), + "subset": subset, + "families": [avalon_knob_data["families"]], + "family": avalon_knob_data["family"], "avalonKnob": avalon_knob_data, - "publish": publish + "publish": node.knob('publish') + }) self.log.info("collected instance: {}".format(instance.data)) instances.append(instance) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 1f1d79fefe..33ac8592a5 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -78,8 +78,7 @@ class CollectNukeWrites(pyblish.api.ContextPlugin): "outputDir": output_dir, "ext": ext, "label": label, - "family": instance.data["avalonKnob"]["family"], - "families": [families], + "families": [families, 'ftrack'], "startFrame": first_frame, "endFrame": last_frame, "outputType": output_type, diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py index 8b7df93d1b..8c178df4e4 100644 --- a/pype/plugins/nuke/publish/integrate_rendered_frames.py +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -31,7 +31,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.register(instance) self.log.info("Integrating Asset in to the database ...") - # self.integrate(instance) + if instance.data.get('transfer', True): + self.integrate(instance) def register(self, instance): @@ -71,8 +72,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): self.log.debug("Establishing staging directory @ %s" % stagingdir) - project = io.find_one({"type": "project"}, - projection={"config.template.publish": True}) + project = io.find_one({"type": "project"}) asset = io.find_one({"type": "asset", "name": ASSET, @@ -127,21 +127,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # \|________| # root = api.registered_root() - # template_data = {"root": root, - # "project": PROJECT, - # "silo": asset['silo'], - # "asset": ASSET, - # "subset": subset["name"], - # "version": version["name"]} hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents'] + if hierarchy: # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*hierarchy) self.log.debug("hierarchy: {}".format(hierarchy)) template_data = {"root": root, "project": {"name": PROJECT, - "code": "prjX"}, + "code": project['data']['code']}, "silo": asset['silo'], + "task": api.Session["AVALON_TASK"], "asset": ASSET, "family": instance.data['family'], "subset": subset["name"], @@ -154,6 +150,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # Find the representations to transfer amongst the files # Each should be a single representation (as such, a single extension) representations = [] + destination_list = [] for files in instance.data["files"]: # Collection @@ -166,26 +163,30 @@ class IntegrateFrames(pyblish.api.InstancePlugin): # |_______| # if isinstance(files, list): + collection = files # Assert that each member has identical suffix - _, ext = os.path.splitext(collection[0]) - assert all(ext == os.path.splitext(name)[1] - for name in collection), ( - "Files had varying suffixes, this is a bug" - ) - - assert not any(os.path.isabs(name) for name in collection) - - template_data["representation"] = ext[1:] + dst_collection = [] for fname in collection: + filename, ext = os.path.splitext(fname) + _, frame = os.path.splitext(filename) + + 
template_data["representation"] = ext[1:] + template_data["frame"] = frame[1:] + src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled.publish.path + dst = anatomy_filled.render.path - # if instance.data.get('transfer', True): - # instance.data["transfers"].append([src, dst]) + dst_collection.append(dst) + instance.data["transfers"].append([src, dst]) + + template = anatomy.render.path + + collections, remainder = clique.assemble(dst_collection) + dst = collections[0].format('{head}{padding}{tail}') else: # Single file @@ -206,36 +207,37 @@ class IntegrateFrames(pyblish.api.InstancePlugin): src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) - dst = anatomy_filled.publish.path + dst = anatomy_filled.render.path + template = anatomy.render.path + instance.data["transfers"].append([src, dst]) - # if instance.data.get('transfer', True): - # dst = src - # instance.data["transfers"].append([src, dst]) representation = { "schema": "pype:representation-2.0", "type": "representation", "parent": version_id, "name": ext[1:], - "data": {'path': src}, + "data": {'path': dst, 'template': template}, "dependencies": instance.data.get("dependencies", "").split(), # Imprint shortcut to context # for performance reasons. "context": { - "root": root, - "project": PROJECT, - "projectcode": "prjX", - 'task': api.Session["AVALON_TASK"], - "silo": asset['silo'], - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": version["name"], - "hierarchy": hierarchy, - "representation": ext[1:] + "root": root, + "project": PROJECT, + "projectcode": project['data']['code'], + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] } } + destination_list.append(dst) + instance.data['destination_list'] = destination_list representations.append(representation) self.log.info("Registering {} items".format(len(representations))) diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py index e8137d006c..27f258d3b3 100644 --- a/pype/plugins/nuke/publish/validate_collection.py +++ b/pype/plugins/nuke/publish/validate_collection.py @@ -45,6 +45,14 @@ class ValidateCollection(pyblish.api.InstancePlugin): assert remainder is not None, self.log.info("There are some extra files in folder") + basename, ext = os.path.splitext(list(collections[0])[0]) + assert all(ext == os.path.splitext(name)[1] + for name in collections[0]), self.log.info( + "Files had varying suffixes" + ) + + assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file name are absolute") + self.log.info('frame_length: {}'.format(frame_length)) self.log.info('len(list(instance.data["files"])): {}'.format( len(list(instance.data["files"][0]))))