diff --git a/pype/__init__.py b/pype/__init__.py index e5d1aee374..8bd31c060d 100644 --- a/pype/__init__.py +++ b/pype/__init__.py @@ -6,6 +6,15 @@ from avalon import api as avalon from .launcher_actions import register_launcher_actions from .lib import collect_container_metadata +import logging +log = logging.getLogger(__name__) + +# do not delete these are mandatory +Anatomy = None +Dataflow = None +Metadata = None +Colorspace = None + PACKAGE_DIR = os.path.dirname(__file__) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") @@ -15,12 +24,13 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load") def install(): - print("Registering global plug-ins..") + log.info("Registering global plug-ins..") pyblish.register_plugin_path(PUBLISH_PATH) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) def uninstall(): - print("Deregistering global plug-ins..") + log.info("Deregistering global plug-ins..") pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) + log.info("Global plug-ins unregistred") diff --git a/pype/api.py b/pype/api.py index e665d40535..36094feb7f 100644 --- a/pype/api.py +++ b/pype/api.py @@ -15,6 +15,26 @@ from .action import ( RepairContextAction ) +from app.api import Logger + +from . import ( + Anatomy, + Colorspace, + Metadata, + Dataflow +) +from .templates import ( + load_data_from_templates, + reset_data_from_templates, + get_project_name, + get_project_code, + get_hiearchy, + get_asset, + get_task, + fill_avalon_workdir, + get_version_from_workfile +) + __all__ = [ # plugin classes "Extractor", @@ -25,5 +45,28 @@ __all__ = [ "ValidateMeshOrder", # action "get_errored_instances_from_context", - "RepairAction" + "RepairAction", + + "Logger", + + # contectual templates + # get data to preloaded templates + "load_data_from_templates", + "reset_data_from_templates", + + # get contextual data + "get_project_name", + "get_project_code", + "get_hiearchy", + "get_asset", + "get_task", + "fill_avalon_workdir", + "get_version_from_workfile", + + # preloaded templates + "Anatomy", + "Colorspace", + "Metadata", + "Dataflow" + ] diff --git a/pype/ftrack/actions/action_Apps.py b/pype/ftrack/actions/action_Apps.py index 9d1e182c19..3d1bf093de 100644 --- a/pype/ftrack/actions/action_Apps.py +++ b/pype/ftrack/actions/action_Apps.py @@ -14,7 +14,7 @@ def registerApp(app, session): try: variant = app['name'].split("_")[1] except Exception as e: - log.warning("'{0}' - App 'name' and 'variant' is not separated by '_' (variant is set to '')".format(app['name'])) + log.warning("'{0}' - App 'name' and 'variant' is not separated by '_' (variant is not set)".format(app['name'])) return abspath = lib.which_app(app['name']) @@ -23,17 +23,16 @@ def registerApp(app, session): return apptoml = toml.load(abspath) + executable = apptoml['executable'] label = app['label'] + if 'ftrack_label' in apptoml: + label = apptoml['ftrack_label'] + icon = None - # TODO get right icons - if 'nuke' in app['name']: - icon = "https://mbtskoudsalg.com/images/nuke-icon-png-2.png" - label = "Nuke" - elif 'maya' in app['name']: - icon = "http://icons.iconarchive.com/icons/froyoshark/enkel/256/Maya-icon.png" - label = "Autodesk Maya" + if 'icon' in apptoml: + icon = apptoml['icon'] # register action AppAction(session, label, name, executable, variant, icon).register() diff --git a/pype/ftrack/actions/action_createCustomAttributes.py b/pype/ftrack/actions/action_createCustomAttributes.py index 5f9dbd7381..d45a92a01d 100644 --- 
a/pype/ftrack/actions/action_createCustomAttributes.py +++ b/pype/ftrack/actions/action_createCustomAttributes.py @@ -24,14 +24,18 @@ class AvalonIdAttribute(BaseAction): def discover(self, session, entities, event): - ''' Validation ''' + ''' + Validation + - action is only for Administrators + ''' + success = False + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + for role in user['user_security_roles']: + if role['security_role']['name'] == 'Administrator': + success = True - # userId = event['source']['user']['id'] - # user = session.query('User where id is ' + userId).one() - # if user['user_security_roles'][0]['security_role']['name'] != 'Administrator': - # return False - - return True + return success def launch(self, session, entities, event): @@ -49,13 +53,21 @@ class AvalonIdAttribute(BaseAction): }) session.commit() try: + # Checkbox for event sync + cbxSyncName = 'avalon_auto_sync' + cbxSyncLabel = 'Avalon auto-sync' + cbxSyncExist = False + # Attribute Name and Label custAttrName = 'avalon_mongo_id' custAttrLabel = 'Avalon/Mongo Id' + + attrs_update = set() # Types that don't need object_type_id base = {'show'} + # Don't create custom attribute on these entity types: - exceptions = ['task','milestone','library'] + exceptions = ['task', 'milestone'] exceptions.extend(base) # Get all possible object types all_obj_types = session.query('ObjectType').all() @@ -73,6 +85,7 @@ class AvalonIdAttribute(BaseAction): # Get IDs of filtered object types all_obj_types_id = set() + for obj in all_obj_types: all_obj_types_id.add(obj['id']) @@ -80,20 +93,60 @@ class AvalonIdAttribute(BaseAction): current_cust_attr = session.query('CustomAttributeConfiguration').all() # Filter already existing AvalonMongoID attr. 
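# --- editor's illustrative sketch, not part of the patch ---
# The discover() override above restricts the action to Administrators by
# walking the event user's security roles. The same ftrack_api query pattern,
# factored into a reusable helper; the helper name is an assumption.
def user_has_role(session, event, allowed=('Administrator',)):
    user_id = event['source']['user']['id']
    user = session.query('User where id is ' + user_id).one()
    return any(role['security_role']['name'] in allowed
               for role in user['user_security_roles'])
# Usage (hypothetical): return user_has_role(session, event)
# --- end sketch ---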
for attr in current_cust_attr: + if attr['key'] == cbxSyncName: + cbxSyncExist = True + cbxAttribute = attr if attr['key'] == custAttrName: if attr['entity_type'] in base: base.remove(attr['entity_type']) + attrs_update.add(attr) if attr['object_type_id'] in all_obj_types_id: all_obj_types_id.remove(attr['object_type_id']) + attrs_update.add(attr) # Set session back to begin("session.query" raises error on commit) session.rollback() # Set security roles for attribute - custAttrSecuRole = session.query('SecurityRole').all() + role_api = session.query('SecurityRole where name is "API"').one() + role_admin = session.query('SecurityRole where name is "Administrator"').one() + roles = [role_api,role_admin] + # Set Text type of Attribute custom_attribute_type = session.query( 'CustomAttributeType where name is "text"' ).one() + # Get/Set 'avalon' group + groups = session.query('CustomAttributeGroup where name is "avalon"').all() + if len(groups) > 1: + msg = "There are more Custom attribute groups with name 'avalon'" + self.log.warning(msg) + return { 'success': False, 'message':msg } + + elif len(groups) < 1: + group = session.create('CustomAttributeGroup', { + 'name': 'avalon', + }) + session.commit() + else: + group = groups[0] + + # Checkbox for auto-sync event / Create or Update(roles + group) + if cbxSyncExist is False: + cbxType = session.query('CustomAttributeType where name is "boolean"').first() + session.create('CustomAttributeConfiguration', { + 'entity_type': 'show', + 'type': cbxType, + 'label': cbxSyncLabel, + 'key': cbxSyncName, + 'default': False, + 'write_security_roles': roles, + 'read_security_roles': roles, + 'group':group, + }) + else: + cbxAttribute['write_security_roles'] = roles + cbxAttribute['read_security_roles'] = roles + cbxAttribute['group'] = group for entity_type in base: # Create a custom attribute configuration. 
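# --- editor's illustrative sketch, not part of the patch ---
# The hunks above and below repeat a create-or-update pattern for
# CustomAttributeConfiguration entries (security roles, group, default value).
# A condensed sketch of that pattern; the helper name and call site are
# assumptions, the field names mirror the surrounding hunks.
def ensure_custom_attribute(session, key, label, attr_type, roles, group,
                            entity_type='show', default=''):
    existing = session.query(
        'CustomAttributeConfiguration where key is "{}"'.format(key)).first()
    if existing is None:
        return session.create('CustomAttributeConfiguration', {
            'entity_type': entity_type,
            'type': attr_type,
            'label': label,
            'key': key,
            'default': default,
            'write_security_roles': roles,
            'read_security_roles': roles,
            'group': group,
        })
    existing['write_security_roles'] = roles
    existing['read_security_roles'] = roles
    existing['group'] = group
    return existing
# --- end sketch ---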
@@ -103,8 +156,9 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': custAttrSecuRole, - 'read_security_roles': custAttrSecuRole, + 'write_security_roles': roles, + 'read_security_roles': roles, + 'group':group, 'config': json.dumps({'markdown': False}) }) @@ -117,16 +171,24 @@ class AvalonIdAttribute(BaseAction): 'label': custAttrLabel, 'key': custAttrName, 'default': '', - 'write_security_roles': custAttrSecuRole, - 'read_security_roles': custAttrSecuRole, + 'write_security_roles': roles, + 'read_security_roles': roles, + 'group':group, 'config': json.dumps({'markdown': False}) }) + for attr in attrs_update: + attr['write_security_roles'] = roles + attr['read_security_roles'] = roles + attr['group'] = group + job['status'] = 'done' session.commit() except Exception as e: + session.rollback() job['status'] = 'failed' + session.commit() self.log.error("Creating custom attributes failed ({})".format(e)) return True diff --git a/pype/ftrack/actions/action_syncToAvalon.py b/pype/ftrack/actions/action_syncToAvalon.py index e2cf4e07f8..c354f2332d 100644 --- a/pype/ftrack/actions/action_syncToAvalon.py +++ b/pype/ftrack/actions/action_syncToAvalon.py @@ -1,5 +1,3 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2017 ftrack import sys import argparse import logging @@ -9,12 +7,45 @@ import json import re from pype import lib from ftrack_action_handler import BaseAction +from bson.objectid import ObjectId from avalon import io, inventory -from avalon.vendor import toml + from pype.ftrack import ftrack_utils class SyncToAvalon(BaseAction): - '''Edit meta data action.''' + ''' + Synchronizing data action - from Ftrack to Avalon DB + + Stores all information about entity. + - Name(string) - Most important information = identifier of entity + - Parent(ObjectId) - Avalon Project Id, if entity is not project itself + - Silo(string) - Last parent except project + - Data(dictionary): + - VisualParent(ObjectId) - Avalon Id of parent asset + - Parents(array of string) - All parent names except project + - Tasks(array of string) - Tasks on asset + - FtrackId(string) + - entityType(string) - entity's type on Ftrack + * All Custom attributes in group 'Avalon' which name don't start with 'avalon_' + + * These information are stored also for all parents and children entities. + + Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'. + - action IS NOT creating this Custom attribute if doesn't exist + - run 'Create Custom Attributes' action or do it manually (Not recommended) + + If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID: + - name, parents and silo are checked -> shows error if are not exact the same + - after sync it is not allowed to change names or move entities + + If ID in 'avalon_mongo_id' is empty string or is not found in DB: + - tries to find entity by name + - found: + - raise error if ftrackId/visual parent/parents are not same + - not found: + - Creates asset/project + + ''' #: Action identifier. 
identifier = 'sync.to.avalon' @@ -28,12 +59,20 @@ class SyncToAvalon(BaseAction): def discover(self, session, entities, event): ''' Validation ''' - + roleCheck = False discover = False - for entity in entities: - if entity.entity_type.lower() not in ['task', 'assetversion']: - discover = True - break + roleList = ['Administrator', 'Project Manager'] + userId = event['source']['user']['id'] + user = session.query('User where id is ' + userId).one() + + for role in user['user_security_roles']: + if role['security_role']['name'] in roleList: + roleCheck = True + if roleCheck is True: + for entity in entities: + if entity.entity_type.lower() not in ['task', 'assetversion']: + discover = True + break return discover @@ -54,8 +93,8 @@ class SyncToAvalon(BaseAction): }) try: - self.log.info("action <" + self.__class__.__name__ + "> is running") - + self.log.info("Action <" + self.__class__.__name__ + "> is running") + self.ca_mongoid = 'avalon_mongo_id' #TODO AVALON_PROJECTS, AVALON_ASSET, AVALON_SILO should be set up otherwise console log shows avalon debug self.setAvalonAttributes() self.importable = [] @@ -71,34 +110,58 @@ class SyncToAvalon(BaseAction): for entity in entities: self.getShotAsset(entity) - # Check duplicate name - raise error if found - all_names = {} + # Check names: REGEX in schema/duplicates - raise error if found + all_names = [] duplicates = [] for e in self.importable: - name = self.checkName(e['name']) - if name in all_names: - duplicates.append("'{}'-'{}'".format(all_names[name], e['name'])) + ftrack_utils.avalon_check_name(e) + if e['name'] in all_names: + duplicates.append("'{}'".format(e['name'])) else: - all_names[name] = e['name'] + all_names.append(e['name']) if len(duplicates) > 0: - raise ValueError("Unable to sync: Entity name duplication: {}".format(", ".join(duplicates))) + raise ValueError("Entity name duplication: {}".format(", ".join(duplicates))) + + ## ----- PROJECT ------ + # store Ftrack project- self.importable[0] must be project entity!!! + self.entityProj = self.importable[0] + # set AVALON_ env + os.environ["AVALON_PROJECT"] = self.entityProj["full_name"] + os.environ["AVALON_ASSET"] = self.entityProj["full_name"] + + self.avalon_project = None + + io.install() # Import all entities to Avalon DB for e in self.importable: self.importToAvalon(session, e) + io.uninstall() + job['status'] = 'done' session.commit() self.log.info('Synchronization to Avalon was successfull!') + except ValueError as ve: + job['status'] = 'failed' + session.commit() + message = str(ve) + self.log.error('Error during syncToAvalon: {}'.format(message)) + except Exception as e: job['status'] = 'failed' - message = str(e) - self.log.error('During synchronization to Avalon went something wrong! 
({})'.format(message)) + session.commit() + exc_type, exc_obj, exc_tb = sys.exc_info() + fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] + log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno) + self.log.error('Error during syncToAvalon: {}'.format(log_message)) + message = 'Unexpected Error - Please check Log for more information' if len(message) > 0: + message = "Unable to sync: {}".format(message) return { 'success': False, 'message': message @@ -126,162 +189,116 @@ class SyncToAvalon(BaseAction): for child in childrens: self.getShotAsset(child) - def checkName(self, input_name): - if input_name.find(" ") == -1: - name = input_name - else: - name = input_name.replace(" ", "-") - self.log.info("Name of {} was changed to {}".format(input_name, name)) - return name - - def importToAvalon(self, session, entity): - eLinks = [] - - ca_mongoid = 'avalon_mongo_id' - - # get needed info of entity and all parents - for e in entity['link']: - tmp = session.get(e['type'], e['id']) - eLinks.append(tmp) - - entityProj = eLinks[0] - - # set AVALON_PROJECT env - os.environ["AVALON_PROJECT"] = entityProj["full_name"] - os.environ["AVALON_ASSET"] = entityProj['full_name'] - # --- Begin: PUSH TO Avalon --- - io.install() - ## ----- PROJECT ------ - # If project don't exists -> ELSE - avalon_project = io.find_one({"type": "project", "name": entityProj["full_name"]}) + entity_type = entity.entity_type - data = {} - data['ftrackId'] = entity['id'] - data['entityType'] = entity_type - - for cust_attr in self.custom_attributes: - key = cust_attr['key'] - if cust_attr['entity_type'].lower() in ['asset']: - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': - data[key] = entity['custom_attributes'][key] - - elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': - # Put space between capitals (e.g. 
'AssetBuild' -> 'Asset Build') - entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) - # Get object id of entity type - ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] - - if cust_attr['object_type_id'] == ent_obj_type_id: - data[key] = entity['custom_attributes'][key] - - if entity_type.lower() in ['project']: # Set project Config config = ftrack_utils.get_config(entity) # Set project template template = lib.get_avalon_project_template_schema() + if self.ca_mongoid in entity['custom_attributes']: + try: + projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid]) + self.avalon_project = io.find_one({"_id": projectId}) + except: + self.log.debug("Entity {} don't have stored entity id in ftrack".format(entity['name'])) - if avalon_project is None: - inventory.save(entityProj['full_name'], config, template) - else: - io.update_many({'type': 'project','name': entityProj['full_name']}, - {'$set':{'config':config}}) + if self.avalon_project is None: + self.avalon_project = io.find_one({ + "type": "project", + "name": entity["full_name"] + }) + if self.avalon_project is None: + inventory.save(entity['full_name'], config, template) + self.avalon_project = io.find_one({ + "type": "project", + "name": entity["full_name"] + }) - data['code'] = entity['name'] + elif self.avalon_project['name'] != entity['full_name']: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(avalon_asset['name'], name)) + + data = ftrack_utils.get_data(self, entity, session,self.custom_attributes) # Store info about project (FtrackId) io.update_many({ 'type': 'project', - 'name': entity['full_name']}, - {'$set':{'data':data}}) + 'name': entity['full_name'] + }, { + '$set':{'data':data, 'config':config} + }) - projectId = io.find_one({"type": "project", "name": entityProj["full_name"]})["_id"] - if ca_mongoid in entity['custom_attributes']: - entity['custom_attributes'][ca_mongoid] = str(projectId) + self.projectId = self.avalon_project["_id"] + if self.ca_mongoid in entity['custom_attributes']: + entity['custom_attributes'][self.ca_mongoid] = str(self.projectId) else: self.log.error("Custom attribute for <{}> is not created.".format(entity['name'])) - io.uninstall() return - # Store project Id - projectId = avalon_project["_id"] - ## ----- ASSETS ------ # Presets: - # TODO how to check if entity is Asset Library or AssetBuild? 
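# --- editor's illustrative sketch, not part of the patch ---
# The asset branch below derives silo and hierarchy from the 'parents' list
# returned by ftrack_utils.get_data(); a minimal sketch of that derivation,
# the example parent names are made up.
import os
parents = ["Assets", "Characters", "Hero"]      # example value only
silo = parents[0] if parents else None          # first parent acts as the silo
hierarchy = os.path.sep.join(parents)           # stored as data['hierarchy']
# --- end sketch ---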
- if entity_type in ['AssetBuild', 'Library']: - silo = 'Assets' + data = ftrack_utils.get_data(self, entity, session, self.custom_attributes) + + # return if entity is silo + if len(data['parents']) == 0: + return else: - silo = 'Film' + silo = data['parents'][0] os.environ['AVALON_SILO'] = silo - # Get list of parents without project - parents = [] - for i in range(1, len(eLinks)-1): - parents.append(eLinks[i]) - - # Get info for 'Data' in Avalon DB - tasks = [] - for child in entity['children']: - if child.entity_type in ['Task']: - tasks.append(child['name']) - - folderStruct = [] - parentId = None - - for parent in parents: - name = self.checkName(parent['name']) - folderStruct.append(name) - parentId = io.find_one({'type': 'asset', 'name': name})['_id'] - if parent['parent'].entity_type != 'project' and parentId is None: - self.importToAvalon(parent) - parentId = io.find_one({'type': 'asset', 'name': name})['_id'] - - hierarchy = os.path.sep.join(folderStruct) - - data['visualParent'] = parentId - data['parents'] = folderStruct - data['tasks'] = tasks - data['hierarchy'] = hierarchy - - - name = self.checkName(entity['name']) + name = entity['name'] os.environ['AVALON_ASSET'] = name - # Try to find asset in current database - avalon_asset = io.find_one({'type': 'asset', 'name': name}) - # Create if don't exists - if avalon_asset is None: - inventory.create_asset(name, silo, data, projectId) - self.log.debug("Asset {} - created".format(name)) - # Raise error if it seems to be different ent. with same name - elif (avalon_asset['data']['ftrackId'] != data['ftrackId'] or - avalon_asset['data']['visualParent'] != data['visualParent'] or - avalon_asset['data']['parents'] != data['parents']): - raise ValueError('Entity <{}> is not same'.format(name)) - # Else update info - else: - io.update_many({'type': 'asset','name': name}, - {'$set':{'data':data, 'silo': silo}}) - # TODO check if is asset in same folder!!! ???? FEATURE FOR FUTURE - self.log.debug("Asset {} - updated".format(name)) + # Try to find asset in current database + avalon_asset = None + if self.ca_mongoid in entity['custom_attributes']: + try: + entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid]) + avalon_asset = io.find_one({"_id": entityId}) + except: + self.log.debug("Entity {} don't have stored entity id in ftrack".format(entity['name'])) + + if avalon_asset is None: + avalon_asset = io.find_one({'type': 'asset', 'name': name}) + # Create if don't exists + if avalon_asset is None: + inventory.create_asset(name, silo, data, self.projectId) + self.log.debug("Asset {} - created".format(name)) + + # Raise error if it seems to be different ent. with same name + else: + aD = avalon_asset['data'] + # check_attr = ['parents', 'ftrackId', 'visualParent'] + if (avalon_asset['data']['parents'] != data['parents'] or + avalon_asset['silo'] != silo): + raise ValueError('In Avalon DB already exists entity with name "{0}"'.format(name)) + + elif avalon_asset['name'] != entity['name']: + raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please create new asset'.format(avalon_asset['name'], name)) + elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']: + old_path = "/".join(avalon_asset['data']['parents']) + new_path = "/".join(data['parents']) + raise ValueError('You can\'t move with entities. 
Entity "{}" was moved from "{}" to "{}" '.format(avalon_asset['name'], old_path, new_path)) + + # Update info + io.update_many({'type': 'asset','name': name}, + {'$set':{'data':data, 'silo': silo}}) + + self.log.debug("Asset {} - updated".format(name)) entityId = io.find_one({'type': 'asset', 'name': name})['_id'] ## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK # Set custom attribute to avalon/mongo id of entity (parentID is last) - if ca_mongoid in entity['custom_attributes']: - entity['custom_attributes'][ca_mongoid] = str(entityId) + if self.ca_mongoid in entity['custom_attributes']: + entity['custom_attributes'][self.ca_mongoid] = str(entityId) else: self.log.error("Custom attribute for <{}> is not created.".format(entity['name'])) - io.uninstall() session.commit() diff --git a/pype/ftrack/actions/ftrack_action_handler.py b/pype/ftrack/actions/ftrack_action_handler.py index fa108ec78b..9bfd870316 100644 --- a/pype/ftrack/actions/ftrack_action_handler.py +++ b/pype/ftrack/actions/ftrack_action_handler.py @@ -1,6 +1,7 @@ # :coding: utf-8 # :copyright: Copyright (c) 2017 ftrack import os +import sys import logging import getpass import platform @@ -10,14 +11,12 @@ from avalon import io, lib, pipeline from avalon import session as sess import acre -from app.api import ( - Templates, - Logger -) -t = Templates( - type=["anatomy"] -) +from pype import api as pype + +log = pype.Logger.getLogger(__name__, "ftrack") + +log.debug("pype.Anatomy: {}".format(pype.Anatomy)) class AppAction(object): @@ -34,7 +33,7 @@ class AppAction(object): def __init__(self, session, label, name, executable, variant=None, icon=None, description=None): '''Expects a ftrack_api.Session instance''' - self.log = Logger.getLogger(self.__class__.__name__) + self.log = pype.Logger.getLogger(self.__class__.__name__) # self.logger = Logger.getLogger(__name__) @@ -74,6 +73,8 @@ class AppAction(object): self._launch ) + self.log.info("Application '{}' - Registered successfully".format(self.label)) + def _discover(self, event): args = self._translate_event( self.session, event @@ -241,7 +242,9 @@ class AppAction(object): os.environ["AVALON_APP"] = self.identifier os.environ["AVALON_APP_NAME"] = self.identifier + "_" + self.variant - anatomy = t.anatomy + os.environ["FTRACK_TASKID"] = id + + anatomy = pype.Anatomy io.install() hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})[ 'data']['parents'] @@ -255,9 +258,10 @@ class AppAction(object): "task": entity['name'], "asset": entity['parent']['name'], "hierarchy": hierarchy} - - anatomy = anatomy.format(data) - + try: + anatomy = anatomy.format(data) + except Exception as e: + log.error("{0} Error in anatomy.format: {1}".format(__name__, e)) os.environ["AVALON_WORKDIR"] = os.path.join(anatomy.work.root, anatomy.work.folder) # TODO Add paths to avalon setup from tomls @@ -297,20 +301,71 @@ class AppAction(object): # Full path to executable launcher execfile = None - for ext in os.environ["PATHEXT"].split(os.pathsep): - fpath = os.path.join(path.strip('"'), self.executable + ext) - if os.path.isfile(fpath) and os.access(fpath, os.X_OK): - execfile = fpath - break + if sys.platform == "win32": + + for ext in os.environ["PATHEXT"].split(os.pathsep): + fpath = os.path.join(path.strip('"'), self.executable + ext) + if os.path.isfile(fpath) and os.access(fpath, os.X_OK): + execfile = fpath + break + pass + + # Run SW if was found executable + if execfile is not None: + lib.launch(executable=execfile, args=[], 
environment=env) + else: + return { + 'success': False, + 'message': "We didn't found launcher for {0}" + .format(self.label) + } + pass + + if sys.platform.startswith('linux'): + execfile = os.path.join(path.strip('"'), self.executable) + if os.path.isfile(execfile): + try: + fp = open(execfile) + except PermissionError as p: + log.error('Access denied on {0} - {1}'. + format(execfile, p)) + return { + 'success': False, + 'message': "Access denied on launcher - {}". + format(execfile) + } + fp.close() + # check executable permission + if not os.access(execfile, os.X_OK): + log.error('No executable permission on {}'. + format(execfile)) + return { + 'success': False, + 'message': "No executable permission - {}" + .format(execfile) + } + pass + else: + log.error('Launcher doesn\'t exist - {}'. + format(execfile)) + return { + 'success': False, + 'message': "Launcher doesn't exist - {}" + .format(execfile) + } + pass + # Run SW if was found executable + if execfile is not None: + lib.launch('/usr/bin/env', args=['bash', execfile], environment=env) + else: + return { + 'success': False, + 'message': "We didn't found launcher for {0}" + .format(self.label) + } + pass + - # Run SW if was found executable - if execfile is not None: - lib.launch(executable=execfile, args=[], environment=env) - else: - return { - 'success': False, - 'message': "We didn't found launcher for {0}".format(self.label) - } # RUN TIMER IN FTRACK username = event['source']['user']['username'] @@ -398,7 +453,7 @@ class BaseAction(object): def __init__(self, session): '''Expects a ftrack_api.Session instance''' - self.log = Logger.getLogger(self.__class__.__name__) + self.log = pype.Logger.getLogger(self.__class__.__name__) if self.label is None: raise ValueError( @@ -435,7 +490,8 @@ class BaseAction(object): ), self._launch ) - self.log.info("----- action - <" + self.__class__.__name__ + "> - Has been registered -----") + + self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__)) def _discover(self, event): args = self._translate_event( diff --git a/pype/ftrack/ftrackRun.py b/pype/ftrack/ftrackRun.py index e90530b3b2..7fddf171da 100644 --- a/pype/ftrack/ftrackRun.py +++ b/pype/ftrack/ftrackRun.py @@ -7,11 +7,19 @@ import time from app import style from app.vendor.Qt import QtCore, QtGui, QtWidgets from pype.ftrack import credentials, login_dialog as login_dialog -from app.api import Logger + from FtrackServer import FtrackServer -log = Logger.getLogger(__name__) +from pype import api as pype + + +# load data from templates +pype.load_data_from_templates() + +log = pype.Logger.getLogger(__name__, "ftrack") # Validation if alredy logged into Ftrack + + class FtrackRunner: def __init__(self, main_parent=None, parent=None): @@ -76,7 +84,7 @@ class FtrackRunner: def runActionServer(self): if self.actionThread is None: self.actionThread = threading.Thread(target=self.setActionServer) - self.actionThread.daemon=True + self.actionThread.daemon = True self.actionThread.start() log.info("Ftrack action server launched") @@ -107,7 +115,7 @@ class FtrackRunner: def runEventServer(self): if self.eventThread is None: self.eventThread = threading.Thread(target=self.setEventServer) - self.eventThread.daemon=True + self.eventThread.daemon = True self.eventThread.start() log.info("Ftrack event server launched") @@ -168,9 +176,9 @@ class FtrackRunner: self.smEventS.addAction(self.aStopEventS) # Actions - basic - self.aLogin = QtWidgets.QAction("Login",self.menu) + self.aLogin = QtWidgets.QAction("Login", 
self.menu) self.aLogin.triggered.connect(self.validate) - self.aLogout = QtWidgets.QAction("Logout",self.menu) + self.aLogout = QtWidgets.QAction("Logout", self.menu) self.aLogout.triggered.connect(self.logout) self.menu.addAction(self.aLogin) diff --git a/pype/ftrack/ftrack_utils.py b/pype/ftrack/ftrack_utils.py index 7891aae0b9..66d739829b 100644 --- a/pype/ftrack/ftrack_utils.py +++ b/pype/ftrack/ftrack_utils.py @@ -1,10 +1,114 @@ -# fttrack help functions +import os +import sys +import re +from pprint import * import ftrack_api -import os -import traceback -from pprint import * from pype import lib +import avalon.io as io +import avalon.api +import avalon +from avalon.vendor import toml, jsonschema +from app.api import Logger + +log = Logger.getLogger(__name__) + +def get_data(parent, entity, session, custom_attributes): + entity_type = entity.entity_type + + data = {} + data['ftrackId'] = entity['id'] + data['entityType'] = entity_type + + for cust_attr in custom_attributes: + key = cust_attr['key'] + if cust_attr['entity_type'].lower() in ['asset']: + data[key] = entity['custom_attributes'][key] + + elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project': + data[key] = entity['custom_attributes'][key] + + elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project': + # Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build') + entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type) + # Get object id of entity type + ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id'] + + if cust_attr['object_type_id'] == ent_obj_type_id: + data[key] = entity['custom_attributes'][key] + + if entity_type in ['Project']: + data['code'] = entity['name'] + return data + + # Get info for 'Data' in Avalon DB + tasks = [] + for child in entity['children']: + if child.entity_type in ['Task']: + tasks.append(child['name']) + + # Get list of parents without project + parents = [] + folderStruct = [] + for i in range(1, len(entity['link'])-1): + parEnt = session.get(entity['link'][i]['type'], entity['link'][i]['id']) + parName = parEnt['name'] + folderStruct.append(parName) + if i > 1: + parents.append(parEnt) + + parentId = None + + for parent in parents: + parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] + if parent['parent'].entity_type != 'project' and parentId is None: + parent.importToAvalon(parent) + parentId = io.find_one({'type': 'asset', 'name': parName})['_id'] + + hierarchy = os.path.sep.join(folderStruct) + + data['visualParent'] = parentId + data['parents'] = folderStruct + data['tasks'] = tasks + data['hierarchy'] = hierarchy + + return data + +def avalon_check_name(entity, inSchema = None): + ValidationError = jsonschema.ValidationError + alright = True + name = entity['name'] + if " " in name: + alright = False + + data = {} + data['data'] = {} + data['type'] = 'asset' + schema = "avalon-core:asset-2.0" + # TODO have project any REGEX check? 
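# --- editor's illustrative sketch, not part of the patch ---
# avalon_check_name(), being added here, validates names through the
# avalon-core JSON schema; the TODO just above asks about a regex check for
# project names. A minimal regex that could back it; the exact pattern is an
# assumption, not an established project convention.
import re
def name_is_valid(name):
    # letters, digits, underscore and dash only (assumed convention)
    return re.match(r"^[a-zA-Z0-9_\-]+$", name) is not None
# --- end sketch ---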
+ if entity.entity_type in ['Project']: + # data['type'] = 'project' + name = entity['full_name'] + # schema = get_avalon_project_template_schema()['schema'] + # elif entity.entity_type in ['AssetBuild','Library']: + # data['silo'] = 'Assets' + # else: + # data['silo'] = 'Film' + data['silo'] = 'Film' + + if inSchema is not None: + schema = inSchema + data['schema'] = schema + data['name'] = name + try: + avalon.schema.validate(data) + except ValidationError: + alright = False + + if alright is False: + raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name)) + + def get_apps(entity): """ Get apps from project @@ -18,10 +122,14 @@ def get_apps(entity): apps = [] for app in entity['custom_attributes']['applications']: try: - label = toml.load(lib.which_app(app))['label'] - apps.append({'name':app, 'label':label}) + app_config = {} + app_config['name'] = app + app_config['label'] = toml.load(avalon.lib.which_app(app))['label'] + + apps.append(app_config) + except Exception as e: - print('Error with application {0} - {1}'.format(app, e)) + log.warning('Error with application {0} - {1}'.format(app, e)) return apps def get_config(entity): diff --git a/pype/lib.py b/pype/lib.py index 34bdda2a17..0b9e066703 100644 --- a/pype/lib.py +++ b/pype/lib.py @@ -9,6 +9,7 @@ from .vendor.pather.error import ParseError import avalon.io as io import avalon.api +import avalon log = logging.getLogger(__name__) @@ -354,10 +355,12 @@ def get_avalon_project_template(): """ template = Templates(type=["anatomy"]) proj_template = {} - # proj_template['workfile'] = '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>' - # proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}' - # proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}' - proj_template['workfile'] = template.anatomy.avalon.workfile - proj_template['work'] = template.anatomy.avalon.work - proj_template['publish'] = template.anatomy.avalon.publish + proj_template['workfile'] = '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>' + proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}' + proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}' + # TODO this down should work but it can't be in default.toml: + # - Raises error when App (e.g. Nuke) is started + # proj_template['workfile'] = template.anatomy.avalon.workfile + # proj_template['work'] = template.anatomy.avalon.work + # proj_template['publish'] = template.anatomy.avalon.publish return proj_template diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index db2b1f4982..371fe2a786 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -1,8 +1,29 @@ import os - +import sys from avalon import api as avalon from pyblish import api as pyblish +from .. 
import api as pype + +from pype.nuke import menu + +from .lib import ( + create_write_node +) + +import nuke + +# removing logger handler created in avalon_core +for name, handler in [(handler.get_name(), handler) + for handler in pype.Logger.logging.root.handlers[:]]: + if "pype" not in str(name).lower(): + pype.Logger.logging.root.removeHandler(handler) + + +log = pype.Logger.getLogger(__name__, "nuke") + +AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") + PARENT_DIR = os.path.dirname(__file__) PACKAGE_DIR = os.path.dirname(PARENT_DIR) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") @@ -12,9 +33,77 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "nuke", "load") CREATE_PATH = os.path.join(PLUGINS_DIR, "nuke", "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nuke", "inventory") +self = sys.modules[__name__] +self.nLogger = None + + +class NukeHandler(pype.Logger.logging.Handler): + ''' + Nuke Handler - emits logs into nuke's script editor. + warning will emit nuke.warning() + critical and fatal would popup msg dialog to alert of the error. + ''' + + def __init__(self): + pype.Logger.logging.Handler.__init__(self) + self.set_name("Pype_Nuke_Handler") + + def emit(self, record): + # Formated message: + msg = self.format(record) + + if record.levelname.lower() in [ + "warning", + "critical", + "fatal", + "error" + ]: + nuke.message(msg) + + +'''Adding Nuke Logging Handler''' +nuke_handler = NukeHandler() +if nuke_handler.get_name() \ + not in [handler.get_name() + for handler in pype.Logger.logging.root.handlers[:]]: + pype.Logger.logging.getLogger().addHandler(nuke_handler) + +if not self.nLogger: + self.nLogger = pype.Logger + + +def reload_config(): + """Attempt to reload pipeline at run-time. + + CAUTION: This is primarily for development and debugging purposes. 
+ + """ + + import importlib + + for module in ( + "app", + "app.api", + "{}.api".format(AVALON_CONFIG), + "{}.templates".format(AVALON_CONFIG), + "{}.nuke".format(AVALON_CONFIG), + "{}.nuke.lib".format(AVALON_CONFIG), + "{}.nuke.templates".format(AVALON_CONFIG), + "{}.nuke.menu".format(AVALON_CONFIG) + ): + log.info("Reloading module: {}...".format(module)) + module = importlib.import_module(module) + try: + reload(module) + except Exception: + importlib.reload(module) + def install(): - print("Registering Nuke plug-ins..") + pype.fill_avalon_workdir() + reload_config() + + log.info("Registering Nuke plug-ins..") pyblish.register_plugin_path(PUBLISH_PATH) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) avalon.register_plugin_path(avalon.Creator, CREATE_PATH) @@ -23,48 +112,56 @@ def install(): pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) # Disable all families except for the ones we explicitly want to see - family_states = ["imagesequence", - "camera", - "pointcache"] + family_states = [ + "render", + "still" + "lifeGroup", + "backdrop", + "imagesequence", + "mov" + "camera", + "pointcache", + ] avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states - # # work files start at app start - # workfiles.show( - # os.environ["AVALON_WORKDIR"] - # ) + menu.install() + + # load data from templates + pype.load_data_from_templates() def uninstall(): - print("Deregistering Nuke plug-ins..") + log.info("Deregistering Nuke plug-ins..") pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) + # reset data from templates + pype.reset_data_from_templates() -def on_pyblish_instance_toggled(instance, new_value, old_value): - """Toggle saver tool passthrough states on instance toggles.""" - from avalon.nuke import viewer_update_and_undo_stop, add_publish_knob, log +def on_pyblish_instance_toggled(instance, old_value, new_value): + """Toggle node passthrough states on instance toggles.""" + self.log.info("instance toggle: {}, old_value: {}, new_value:{} ".format( + instance, old_value, new_value)) - writes = [n for n in instance if - n.Class() == "Write"] - if not writes: - return + from avalon.nuke import ( + viewer_update_and_undo_stop, + add_publish_knob + ) # Whether instances should be passthrough based on new value - passthrough = not new_value - with viewer_update_and_undo_stop(): - for n in writes: - try: - n["publish"].value() - except ValueError: - n = add_publish_knob(n) - log.info(" `Publish` knob was added to write node..") - current = n["publish"].value() - if current != passthrough: - n["publish"].setValue(passthrough) + with viewer_update_and_undo_stop(): + n = instance[0] + try: + n["publish"].value() + except ValueError: + n = add_publish_knob(n) + log.info(" `Publish` knob was added to write node..") + + n["publish"].setValue(new_value) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 3971b7c977..79c292b2ba 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -1,14 +1,254 @@ import sys - +from collections import OrderedDict +from pprint import pprint from avalon.vendor.Qt import QtGui import avalon.nuke - +import pype.api as pype import nuke +log = pype.Logger.getLogger(__name__, "nuke") self = sys.modules[__name__] self._project = None +def format_anatomy(data): + from .templates import ( + get_anatomy + ) + file = 
script_name() + + anatomy = get_anatomy() + + # TODO: perhaps should be in try! + padding = anatomy.render.padding + + data.update({ + "hierarchy": pype.get_hiearchy(), + "frame": "#"*padding, + "VERSION": pype.get_version_from_workfile(file) + }) + + # log.info("format_anatomy:anatomy: {}".format(anatomy)) + return anatomy.format(data) + + +def script_name(): + return nuke.root().knob('name').value() + + +def create_write_node(name, data): + from .templates import ( + get_dataflow, + get_colorspace + ) + nuke_dataflow_writes = get_dataflow(**data) + nuke_colorspace_writes = get_colorspace(**data) + try: + anatomy_filled = format_anatomy({ + "subset": data["avalon"]["subset"], + "asset": data["avalon"]["asset"], + "task": pype.get_task(), + "family": data["avalon"]["family"], + "project": {"name": pype.get_project_name(), + "code": pype.get_project_code()}, + "representation": nuke_dataflow_writes.file_type, + }) + except Exception as e: + log.error("problem with resolving anatomy tepmlate: {}".format(e)) + + log.debug("anatomy_filled.render: {}".format(anatomy_filled.render)) + + _data = OrderedDict({ + "file": str(anatomy_filled.render.path).replace("\\", "/") + }) + + # adding dataflow template + {_data.update({k: v}) + for k, v in nuke_dataflow_writes.items() + if k not in ["id", "previous"]} + + # adding dataflow template + {_data.update({k: v}) + for k, v in nuke_colorspace_writes.items()} + + _data = avalon.nuke.lib.fix_data_for_node_create(_data) + + log.debug(_data) + + _data["frame_range"] = data.get("frame_range", None) + + instance = avalon.nuke.lib.add_write_node( + name, + **_data + ) + instance = avalon.nuke.lib.imprint(instance, data["avalon"]) + add_rendering_knobs(instance) + return instance + + +def add_rendering_knobs(node): + if "render" not in node.knobs(): + knob = nuke.Boolean_Knob("render", "Render") + knob.setFlag(0x1000) + knob.setValue(False) + node.addKnob(knob) + if "render_farm" not in node.knobs(): + knob = nuke.Boolean_Knob("render_farm", "Render on Farm") + knob.setValue(False) + node.addKnob(knob) + return node + + +def update_frame_range(start, end, root=None): + """Set Nuke script start and end frame range + + Args: + start (float, int): start frame + end (float, int): end frame + root (object, Optional): root object from nuke's script + + Returns: + None + + """ + + knobs = { + "first_frame": start, + "last_frame": end + } + + with avalon.nuke.viewer_update_and_undo_stop(): + for key, value in knobs.items(): + if root: + root[key].setValue(value) + else: + nuke.root()[key].setValue(value) + + +def get_additional_data(container): + """Get Nuke's related data for the container + + Args: + container(dict): the container found by the ls() function + + Returns: + dict + """ + + node = container["_tool"] + tile_color = node['tile_color'].value() + if tile_color is None: + return {} + + hex = '%08x' % tile_color + rgba = [ + float(int(hex[0:2], 16)) / 255.0, + float(int(hex[2:4], 16)) / 255.0, + float(int(hex[4:6], 16)) / 255.0 + ] + + return {"color": QtGui.QColor().fromRgbF(rgba[0], rgba[1], rgba[2])} + + +def set_viewers_colorspace(viewer): + assert isinstance(viewer, dict), log.error( + "set_viewers_colorspace(): argument should be dictionary") + + filter_knobs = [ + "viewerProcess", + "wipe_position" + ] + viewers = [n for n in nuke.allNodes() if n.Class() == 'Viewer'] + erased_viewers = [] + + for v in viewers: + v['viewerProcess'].setValue(str(viewer.viewerProcess)) + if str(viewer.viewerProcess) not in v['viewerProcess'].value(): + copy_inputs = 
v.dependencies() + copy_knobs = {k: v[k].value() for k in v.knobs() + if k not in filter_knobs} + pprint(copy_knobs) + # delete viewer with wrong settings + erased_viewers.append(v['name'].value()) + nuke.delete(v) + + # create new viewer + nv = nuke.createNode("Viewer") + + # connect to original inputs + for i, n in enumerate(copy_inputs): + nv.setInput(i, n) + + # set coppied knobs + for k, v in copy_knobs.items(): + print(k, v) + nv[k].setValue(v) + + # set viewerProcess + nv['viewerProcess'].setValue(str(viewer.viewerProcess)) + + if erased_viewers: + log.warning( + "Attention! Viewer nodes {} were erased." + "It had wrong color profile".format(erased_viewers)) + + +def set_root_colorspace(root_dict): + assert isinstance(root_dict, dict), log.error( + "set_root_colorspace(): argument should be dictionary") + for knob, value in root_dict.items(): + if nuke.root()[knob].value() not in value: + nuke.root()[knob].setValue(str(value)) + log.info("nuke.root()['{}'] changed to: {}".format(knob, value)) + + +def set_writes_colorspace(write_dict): + assert isinstance(write_dict, dict), log.error( + "set_root_colorspace(): argument should be dictionary") + log.info("set_writes_colorspace(): {}".format(write_dict)) + + +def set_colorspace(): + from pype import api as pype + + nuke_colorspace = getattr(pype.Colorspace, "nuke", None) + + try: + set_root_colorspace(nuke_colorspace.root) + except AttributeError: + log.error( + "set_colorspace(): missing `root` settings in template") + try: + set_viewers_colorspace(nuke_colorspace.viewer) + except AttributeError: + log.error( + "set_colorspace(): missing `viewer` settings in template") + try: + set_writes_colorspace(nuke_colorspace.write) + except AttributeError: + log.error( + "set_colorspace(): missing `write` settings in template") + + try: + for key in nuke_colorspace: + log.info("{}".format(key)) + except TypeError: + log.error("Nuke is not in templates! 
\n\n\n" + "contact your supervisor!") + + +def get_avalon_knob_data(node): + import toml + try: + data = toml.loads(node['avalon'].value()) + except: + return None + return data + +# TODO: bellow functions are wip and needs to be check where they are used +# ------------------------------------ + + def update_frame_range(start, end, root=None): """Set Nuke script start and end frame range diff --git a/pype/nuke/menu.py b/pype/nuke/menu.py new file mode 100644 index 0000000000..97e2432e16 --- /dev/null +++ b/pype/nuke/menu.py @@ -0,0 +1,12 @@ +import nuke +from avalon.api import Session + +from pype.nuke import lib + + +def install(): + menubar = nuke.menu("Nuke") + menu = menubar.findItem(Session["AVALON_LABEL"]) + + menu.addSeparator() + menu.addCommand("Set colorspace...", lib.set_colorspace) diff --git a/pype/nuke/templates.py b/pype/nuke/templates.py new file mode 100644 index 0000000000..16cb6062a2 --- /dev/null +++ b/pype/nuke/templates.py @@ -0,0 +1,41 @@ +from pype import api as pype + +log = pype.Logger.getLogger(__name__, "nuke") + + +def get_anatomy(**kwarg): + return pype.Anatomy + + +def get_dataflow(**kwarg): + log.info(kwarg) + host = kwarg.get("host", "nuke") + cls = kwarg.get("class", None) + preset = kwarg.get("preset", None) + assert any([host, cls]), log.error("nuke.templates.get_dataflow():" + "Missing mandatory kwargs `host`, `cls`") + + nuke_dataflow = getattr(pype.Dataflow, str(host), None) + nuke_dataflow_node = getattr(nuke_dataflow.nodes, str(cls), None) + if preset: + nuke_dataflow_node = getattr(nuke_dataflow_node, str(preset), None) + + log.info("Dataflow: {}".format(nuke_dataflow_node)) + return nuke_dataflow_node + + +def get_colorspace(**kwarg): + log.info(kwarg) + host = kwarg.get("host", "nuke") + cls = kwarg.get("class", None) + preset = kwarg.get("preset", None) + assert any([host, cls]), log.error("nuke.templates.get_colorspace():" + "Missing mandatory kwargs `host`, `cls`") + + nuke_colorspace = getattr(pype.Colorspace, str(host), None) + nuke_colorspace_node = getattr(nuke_colorspace, str(cls), None) + if preset: + nuke_colorspace_node = getattr(nuke_colorspace_node, str(preset), None) + + log.info("Colorspace: {}".format(nuke_colorspace_node)) + return nuke_colorspace_node diff --git a/pype/plugins/ftrack/collect_ftrack_api.py b/pype/plugins/ftrack/collect_ftrack_api.py new file mode 100644 index 0000000000..ce83652e9c --- /dev/null +++ b/pype/plugins/ftrack/collect_ftrack_api.py @@ -0,0 +1,22 @@ +import os + +import ftrack_api_old as ftrack_api +import pyblish.api + + +class CollectFtrackApi(pyblish.api.ContextPlugin): + """ Collects an ftrack session and the current task id. """ + + order = pyblish.api.CollectorOrder + label = "Collect Ftrack Api" + + def process(self, context): + + # Collect session + session = ftrack_api.Session() + context.data["ftrackSession"] = session + + # Collect task + task_id = os.environ.get("FTRACK_TASKID", "") + + context.data["ftrackTask"] = session.get("Task", task_id) diff --git a/pype/plugins/ftrack/integrate_ftrack_api.py b/pype/plugins/ftrack/integrate_ftrack_api.py new file mode 100644 index 0000000000..e6624dcf51 --- /dev/null +++ b/pype/plugins/ftrack/integrate_ftrack_api.py @@ -0,0 +1,288 @@ +import os + +import pyblish.api +import clique + + +class IntegrateFtrackApi(pyblish.api.InstancePlugin): + """ Commit components to server. 
""" + + order = pyblish.api.IntegratorOrder+0.499 + label = "Integrate Ftrack Api" + families = ["ftrack"] + + def query(self, entitytype, data): + """ Generate a query expression from data supplied. + + If a value is not a string, we'll add the id of the entity to the + query. + + Args: + entitytype (str): The type of entity to query. + data (dict): The data to identify the entity. + exclusions (list): All keys to exclude from the query. + + Returns: + str: String query to use with "session.query" + """ + queries = [] + for key, value in data.iteritems(): + if not isinstance(value, (basestring, int)): + if "id" in value.keys(): + queries.append( + "{0}.id is \"{1}\"".format(key, value["id"]) + ) + else: + queries.append("{0} is \"{1}\"".format(key, value)) + + query = ( + "select id from " + entitytype + " where " + " and ".join(queries) + ) + self.log.debug(query) + return query + + def process(self, instance): + + session = instance.context.data["ftrackSession"] + task = instance.context.data["ftrackTask"] + + info_msg = "Created new {entity_type} with data: {data}" + info_msg += ", metadata: {metadata}." + + # Iterate over components and publish + for data in instance.data.get("ftrackComponentsList", []): + + # AssetType + # Get existing entity. + assettype_data = {"short": "upload"} + assettype_data.update(data.get("assettype_data", {})) + + assettype_entity = session.query( + self.query("AssetType", assettype_data) + ).first() + + # Create a new entity if none exits. + if not assettype_entity: + assettype_entity = session.create("AssetType", assettype_data) + self.log.info( + "Created new AssetType with data: ".format(assettype_data) + ) + + # Asset + # Get existing entity. + asset_data = { + "name": task["name"], + "type": assettype_entity, + "parent": task["parent"], + } + asset_data.update(data.get("asset_data", {})) + + asset_entity = session.query( + self.query("Asset", asset_data) + ).first() + + # Extracting metadata, and adding after entity creation. This is + # due to a ftrack_api bug where you can't add metadata on creation. + asset_metadata = asset_data.pop("metadata", {}) + + # Create a new entity if none exits. + if not asset_entity: + asset_entity = session.create("Asset", asset_data) + self.log.info( + info_msg.format( + entity_type="Asset", + data=asset_data, + metadata=asset_metadata + ) + ) + + # Adding metadata + existing_asset_metadata = asset_entity["metadata"] + existing_asset_metadata.update(asset_metadata) + asset_entity["metadata"] = existing_asset_metadata + + # AssetVersion + # Get existing entity. + assetversion_data = { + "version": 0, + "asset": asset_entity, + "task": task + } + assetversion_data.update(data.get("assetversion_data", {})) + + assetversion_entity = session.query( + self.query("AssetVersion", assetversion_data) + ).first() + + # Extracting metadata, and adding after entity creation. This is + # due to a ftrack_api bug where you can't add metadata on creation. + assetversion_metadata = assetversion_data.pop("metadata", {}) + + # Create a new entity if none exits. 
+ if not assetversion_entity: + assetversion_entity = session.create( + "AssetVersion", assetversion_data + ) + self.log.info( + info_msg.format( + entity_type="AssetVersion", + data=assetversion_data, + metadata=assetversion_metadata + ) + ) + + # Adding metadata + existing_assetversion_metadata = assetversion_entity["metadata"] + existing_assetversion_metadata.update(assetversion_metadata) + assetversion_entity["metadata"] = existing_assetversion_metadata + + # Have to commit the version and asset, because location can't + # determine the final location without. + session.commit() + + # Component + # Get existing entity. + component_data = { + "name": "main", + "version": assetversion_entity + } + component_data.update(data.get("component_data", {})) + + component_entity = session.query( + self.query("Component", component_data) + ).first() + + component_overwrite = data.get("component_overwrite", False) + location = data.get("component_location", session.pick_location()) + + # Overwrite existing component data if requested. + if component_entity and component_overwrite: + + origin_location = session.query( + "Location where name is \"ftrack.origin\"" + ).one() + + # Removing existing members from location + components = list(component_entity.get("members", [])) + components += [component_entity] + for component in components: + for loc in component["component_locations"]: + if location["id"] == loc["location_id"]: + location.remove_component( + component, recursive=False + ) + + # Deleting existing members on component entity + for member in component_entity.get("members", []): + session.delete(member) + del(member) + + session.commit() + + # Reset members in memory + if "members" in component_entity.keys(): + component_entity["members"] = [] + + # Add components to origin location + try: + collection = clique.parse(data["component_path"]) + except ValueError: + # Assume its a single file + # Changing file type + name, ext = os.path.splitext(data["component_path"]) + component_entity["file_type"] = ext + + origin_location.add_component( + component_entity, data["component_path"] + ) + else: + # Changing file type + component_entity["file_type"] = collection.format("{tail}") + + # Create member components for sequence. + for member_path in collection: + + size = 0 + try: + size = os.path.getsize(member_path) + except OSError: + pass + + name = collection.match(member_path).group("index") + + member_data = { + "name": name, + "container": component_entity, + "size": size, + "file_type": os.path.splitext(member_path)[-1] + } + + component = session.create( + "FileComponent", member_data + ) + origin_location.add_component( + component, member_path, recursive=False + ) + component_entity["members"].append(component) + + # Add components to location. + location.add_component( + component_entity, origin_location, recursive=True + ) + + data["component"] = component_entity + msg = "Overwriting Component with path: {0}, data: {1}, " + msg += "location: {2}" + self.log.info( + msg.format( + data["component_path"], + component_data, + location + ) + ) + + # Extracting metadata, and adding after entity creation. This is + # due to a ftrack_api bug where you can't add metadata on creation. + component_metadata = component_data.pop("metadata", {}) + + # Create new component if none exists. 
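# --- editor's illustrative sketch, not part of the patch ---
# The component-overwrite branch above distinguishes file sequences from
# single files with clique.parse(), which raises ValueError for
# non-collections. A minimal sketch of that behaviour; the example path is
# made up.
import clique
try:
    collection = clique.parse("/tmp/render.%04d.exr [1001-1005]")
    file_type = collection.format("{tail}")   # e.g. ".exr"
    frame_paths = list(collection)            # individual member paths
except ValueError:
    # a plain, single file: fall back to os.path.splitext() as the plugin does
    pass
# --- end sketch ---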
+ new_component = False + if not component_entity: + component_entity = assetversion_entity.create_component( + data["component_path"], + data=component_data, + location=location + ) + data["component"] = component_entity + msg = "Created new Component with path: {0}, data: {1}" + msg += ", metadata: {2}, location: {3}" + self.log.info( + msg.format( + data["component_path"], + component_data, + component_metadata, + location + ) + ) + new_component = True + + # Adding metadata + existing_component_metadata = component_entity["metadata"] + existing_component_metadata.update(component_metadata) + component_entity["metadata"] = existing_component_metadata + + # Setting assetversion thumbnail + if data.get("thumbnail", False): + assetversion_entity["thumbnail_id"] = component_entity["id"] + + # Inform user about no changes to the database. + if (component_entity and not component_overwrite and + not new_component): + data["component"] = component_entity + self.log.info( + "Found existing component, and no request to overwrite. " + "Nothing has been changed." + ) + else: + # Commit changes. + session.commit() diff --git a/pype/plugins/ftrack/integrate_ftrack_instances.py b/pype/plugins/ftrack/integrate_ftrack_instances.py new file mode 100644 index 0000000000..9a0a36a413 --- /dev/null +++ b/pype/plugins/ftrack/integrate_ftrack_instances.py @@ -0,0 +1,67 @@ +import pyblish.api +import os + + +class IntegrateFtrackInstance(pyblish.api.InstancePlugin): + """Collect ftrack component data + + Add ftrack component list to instance. + + + """ + + order = pyblish.api.IntegratorOrder + 0.48 + label = 'Integrate Ftrack Component' + + family_mapping = {'camera': 'cam', + 'look': 'look', + 'mayaAscii': 'scene', + 'model': 'geo', + 'rig': 'rig', + 'setdress': 'setdress', + 'pointcache': 'cache', + 'review': 'mov'} + + def process(self, instance): + + self.log.debug('instance {}'.format(instance)) + + assumed_data = instance.data["assumedTemplateData"] + assumed_version = assumed_data["version"] + version_number = int(assumed_version) + family = instance.data['family'].lower() + asset_type = '' + + asset_type = self.family_mapping[family] + + componentList = [] + + transfers = instance.data["transfers"] + + ft_session = instance.context.data["ftrackSession"] + location = ft_session.query( + 'Location where name is "ftrack.unmanaged"').one() + self.log.debug('location {}'.format(location)) + + for src, dest in transfers: + filename, ext = os.path.splitext(src) + self.log.debug('source filename: ' + filename) + self.log.debug('source ext: ' + ext) + + componentList.append({"assettype_data": { + "short": asset_type, + }, + "assetversion_data": { + "version": version_number, + }, + "component_data": { + "name": ext[1:], # Default component name is "main". 
+ }, + "component_path": dest, + 'component_location': location, + "component_overwrite": False, + } + ) + + self.log.debug('componentsList: {}'.format(str(componentList))) + instance.data["ftrackComponentsList"] = componentList diff --git a/pype/plugins/global/publish/collect_deadline_user.py b/pype/plugins/global/_publish_unused/collect_deadline_user.py similarity index 100% rename from pype/plugins/global/publish/collect_deadline_user.py rename to pype/plugins/global/_publish_unused/collect_deadline_user.py diff --git a/pype/plugins/global/publish/collect_comment.py b/pype/plugins/global/publish/collect_comment.py index a246b7eaba..5bbd1da2a1 100644 --- a/pype/plugins/global/publish/collect_comment.py +++ b/pype/plugins/global/publish/collect_comment.py @@ -1,7 +1,7 @@ import pyblish.api -class CollectColorbleedComment(pyblish.api.ContextPlugin): +class CollectComment(pyblish.api.ContextPlugin): """This plug-ins displays the comment dialog box per default""" label = "Collect Comment" diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index f2a3da7df4..48b6c448e3 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -18,6 +18,3 @@ class CollectTemplates(pyblish.api.ContextPlugin): type=["anatomy"] ) context.data['anatomy'] = templates.anatomy - for key in templates.anatomy: - self.log.info(str(key) + ": " + str(templates.anatomy[key])) - # return diff --git a/pype/plugins/global/publish/collect_time.py b/pype/plugins/global/publish/collect_time.py index d4fa658425..e0adc7dfc3 100644 --- a/pype/plugins/global/publish/collect_time.py +++ b/pype/plugins/global/publish/collect_time.py @@ -2,7 +2,7 @@ import pyblish.api from avalon import api -class CollectMindbenderTime(pyblish.api.ContextPlugin): +class CollectTime(pyblish.api.ContextPlugin): """Store global time at the time of publish""" label = "Collect Current Time" diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py index 87ffa2aaa3..e20f59133c 100644 --- a/pype/plugins/global/publish/integrate.py +++ b/pype/plugins/global/publish/integrate.py @@ -233,6 +233,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "root": root, "project": PROJECT, "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], "silo": asset['silo'], "asset": ASSET, "family": instance.data['family'], diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py index 1933713577..cb852f7c43 100644 --- a/pype/plugins/global/publish/submit_publish_job.py +++ b/pype/plugins/global/publish/submit_publish_job.py @@ -125,7 +125,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin): hosts = ["fusion", "maya", "nuke"] families = [ - "saver.deadline", + "render.deadline", "renderlayer", "imagesequence" ] diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py index cbfecdc0ad..0daf3cc19c 100644 --- a/pype/plugins/maya/publish/collect_look.py +++ b/pype/plugins/maya/publish/collect_look.py @@ -107,6 +107,7 @@ def seq_to_glob(path): "": "" } + lower = path.lower() has_pattern = False for pattern, regex_pattern in patterns.items(): @@ -213,6 +214,9 @@ class CollectLook(pyblish.api.InstancePlugin): with lib.renderlayer(instance.data["renderlayer"]): self.collect(instance) + # make ftrack publishable + instance.data["families"] = ['ftrack'] + def collect(self, instance): 
self.log.info("Looking for look associations " diff --git a/pype/plugins/maya/publish/collect_model.py b/pype/plugins/maya/publish/collect_model.py index 47808934b3..393bb82910 100644 --- a/pype/plugins/maya/publish/collect_model.py +++ b/pype/plugins/maya/publish/collect_model.py @@ -7,7 +7,7 @@ class CollectModelData(pyblish.api.InstancePlugin): """Collect model data Ensures always only a single frame is extracted (current frame). - + Note: This is a workaround so that the `studio.model` family can use the same pointcache extractor implementation as animation and pointcaches. @@ -24,3 +24,6 @@ class CollectModelData(pyblish.api.InstancePlugin): frame = cmds.currentTime(query=True) instance.data['startFrame'] = frame instance.data['endFrame'] = frame + + # make ftrack publishable + instance.data["families"] = ['ftrack'] diff --git a/pype/plugins/nuke/_publish_unused/collect_render_target.py b/pype/plugins/nuke/_publish_unused/collect_render_target.py new file mode 100644 index 0000000000..86a38f26b6 --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/collect_render_target.py @@ -0,0 +1,47 @@ +import pyblish.api + + +class CollectNukeRenderMode(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Collect current comp's render Mode + + Options: + local + deadline + + Note that this value is set for each comp separately. When you save the + comp this information will be stored in that file. If for some reason the + available tool does not visualize which render mode is set for the + current comp, please run the following line in the console (Py2) + + comp.GetData("rendermode") + + This will return the name of the current render mode as seen above under + Options. + + """ + + order = pyblish.api.CollectorOrder + 0.4 + label = "Collect Render Mode" + hosts = ["nuke"] + families = ["write", "render.local"] + + def process(self, instance): + """Collect all image sequence tools""" + options = ["local", "deadline"] + + node = instance[0] + + if bool(node["render_local"].getValue()): + rendermode = "local" + else: + rendermode = "deadline" + + assert rendermode in options, "Must be supported render mode" + + # Append family + instance.data["families"].remove("render") + family = "render.{0}".format(rendermode) + instance.data["families"].append(family) + + self.log.info("Render mode: {0}".format(rendermode)) diff --git a/pype/plugins/nuke/_publish_unused/extract_nuke_write.py b/pype/plugins/nuke/_publish_unused/extract_nuke_write.py new file mode 100644 index 0000000000..155b5cf56d --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/extract_nuke_write.py @@ -0,0 +1,116 @@ +import os + +import nuke +import pyblish.api + + +class Extract(pyblish.api.InstancePlugin): + """Super class for write and writegeo extractors.""" + + order = pyblish.api.ExtractorOrder + optional = True + label = "Extract Nuke [super]" + hosts = ["nuke"] + match = pyblish.api.Subset + + # targets = ["process.local"] + + def execute(self, instance): + # Get frame range + node = instance[0] + first_frame = nuke.root()["first_frame"].value() + last_frame = nuke.root()["last_frame"].value() + + if node["use_limit"].value(): + first_frame = node["first"].value() + last_frame = node["last"].value() + + # Render frames + nuke.execute(node.name(), int(first_frame), int(last_frame)) + + +class ExtractNukeWrite(Extract): + """ Extract output from write nodes. 
""" + + families = ["write", "local"] + label = "Extract Write" + + def process(self, instance): + + self.execute(instance) + + # Validate output + for filename in list(instance.data["collection"]): + if not os.path.exists(filename): + instance.data["collection"].remove(filename) + self.log.warning("\"{0}\" didn't render.".format(filename)) + + +class ExtractNukeCache(Extract): + + label = "Cache" + families = ["cache", "local"] + + def process(self, instance): + + self.execute(instance) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg + + +class ExtractNukeCamera(Extract): + + label = "Camera" + families = ["camera", "local"] + + def process(self, instance): + + node = instance[0] + node["writeGeometries"].setValue(False) + node["writePointClouds"].setValue(False) + node["writeAxes"].setValue(False) + + file_path = node["file"].getValue() + node["file"].setValue(instance.data["output_path"]) + + self.execute(instance) + + node["writeGeometries"].setValue(True) + node["writePointClouds"].setValue(True) + node["writeAxes"].setValue(True) + + node["file"].setValue(file_path) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg + + +class ExtractNukeGeometry(Extract): + + label = "Geometry" + families = ["geometry", "local"] + + def process(self, instance): + + node = instance[0] + node["writeCameras"].setValue(False) + node["writePointClouds"].setValue(False) + node["writeAxes"].setValue(False) + + file_path = node["file"].getValue() + node["file"].setValue(instance.data["output_path"]) + + self.execute(instance) + + node["writeCameras"].setValue(True) + node["writePointClouds"].setValue(True) + node["writeAxes"].setValue(True) + + node["file"].setValue(file_path) + + # Validate output + msg = "\"{0}\" didn't render.".format(instance.data["output_path"]) + assert os.path.exists(instance.data["output_path"]), msg diff --git a/pype/plugins/nuke/_publish_unused/publish_image_sequences.py b/pype/plugins/nuke/_publish_unused/publish_image_sequences.py new file mode 100644 index 0000000000..e8b468e94a --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/publish_image_sequences.py @@ -0,0 +1,98 @@ +import re +import os +import json +import subprocess + +import pyblish.api + +from pype.action import get_errored_plugins_from_data + + +def _get_script(): + """Get path to the image sequence script""" + + # todo: use a more elegant way to get the python script + + try: + from pype.fusion.scripts import publish_filesequence + except Exception: + raise RuntimeError("Expected module 'publish_imagesequence'" + "to be available") + + module_path = publish_filesequence.__file__ + if module_path.endswith(".pyc"): + module_path = module_path[:-len(".pyc")] + ".py" + + return module_path + + +class PublishImageSequence(pyblish.api.InstancePlugin): + """Publish the generated local image sequences.""" + + order = pyblish.api.IntegratorOrder + label = "Publish Rendered Image Sequence(s)" + hosts = ["fusion"] + families = ["saver.renderlocal"] + + def process(self, instance): + + # Skip this plug-in if the ExtractImageSequence failed + errored_plugins = get_errored_plugins_from_data(instance.context) + if any(plugin.__name__ == "FusionRenderLocal" for plugin in + errored_plugins): + raise RuntimeError("Fusion local render failed, " + "publishing images skipped.") + + subset = instance.data["subset"] + ext = 
instance.data["ext"] + + # Regex to match resulting renders + regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset), + ext=re.escape(ext)) + + # The instance has most of the information already stored + metadata = { + "regex": regex, + "startFrame": instance.context.data["startFrame"], + "endFrame": instance.context.data["endFrame"], + "families": ["imagesequence"], + } + + # Write metadata and store the path in the instance + output_directory = instance.data["outputDir"] + path = os.path.join(output_directory, + "{}_metadata.json".format(subset)) + with open(path, "w") as f: + json.dump(metadata, f) + + assert os.path.isfile(path), ("Stored path is not a file for %s" + % instance.data["name"]) + + # Suppress any subprocess console + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + startupinfo.wShowWindow = subprocess.SW_HIDE + + process = subprocess.Popen(["python", _get_script(), + "--paths", path], + bufsize=1, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=startupinfo) + + while True: + output = process.stdout.readline() + # Break when there is no output or a return code has been given + if output == '' and process.poll() is not None: + process.stdout.close() + break + if output: + line = output.strip() + if line.startswith("ERROR"): + self.log.error(line) + else: + self.log.info(line) + + if process.returncode != 0: + raise RuntimeError("Process quit with non-zero " + "return code: {}".format(process.returncode)) diff --git a/pype/plugins/nuke/_publish_unused/submit_deadline.py b/pype/plugins/nuke/_publish_unused/submit_deadline.py new file mode 100644 index 0000000000..ffb298f75d --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/submit_deadline.py @@ -0,0 +1,147 @@ +import os +import json +import getpass + +from avalon import api +from avalon.vendor import requests + +import pyblish.api + + +class NukeSubmitDeadline(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Submit current Comp to Deadline + + Renders are submitted to a Deadline Web Service as + supplied via the environment variable AVALON_DEADLINE + + """ + + label = "Submit to Deadline" + order = pyblish.api.IntegratorOrder + hosts = ["nuke"] + families = ["write", "render.deadline"] + + def process(self, instance): + + context = instance.context + + key = "__hasRun{}".format(self.__class__.__name__) + if context.data.get(key, False): + return + else: + context.data[key] = True + + AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE", + "http://localhost:8082") + assert AVALON_DEADLINE, "Requires AVALON_DEADLINE" + + # Collect all saver instances in context that are to be rendered + write_instances = [] + for instance in context[:]: + if not self.families[0] in instance.data.get("families"): + # Allow only saver family instances + continue + + if not instance.data.get("publish", True): + # Skip inactive instances + continue + self.log.debug(instance.data["name"]) + write_instances.append(instance) + + if not write_instances: + raise RuntimeError("No instances found for Deadline submittion") + + hostVersion = int(context.data["hostVersion"]) + filepath = context.data["currentFile"] + filename = os.path.basename(filepath) + comment = context.data.get("comment", "") + deadline_user = context.data.get("deadlineUser", getpass.getuser()) + + # Documentation for keys available at: + # https://docs.thinkboxsoftware.com + # /products/deadline/8.0/1_User%20Manual/manual + # /manual-submission.html#job-info-file-options + payload = 
{ + "JobInfo": { + # Top-level group name + "BatchName": filename, + + # Job name, as seen in Monitor + "Name": filename, + + # User, as seen in Monitor + "UserName": deadline_user, + + # Use a default submission pool for Nuke + "Pool": "nuke", + + "Plugin": "Nuke", + "Frames": "{start}-{end}".format( + start=int(instance.data["startFrame"]), + end=int(instance.data["endFrame"]) + ), + + "Comment": comment, + }, + "PluginInfo": { + # Input + "FlowFile": filepath, + + # Mandatory for Deadline + "Version": str(hostVersion), + + # Render in high quality + "HighQuality": True, + + # Whether saver output should be checked after rendering + # is complete + "CheckOutput": True, + + # Proxy: higher numbers smaller images for faster test renders + # 1 = no proxy quality + "Proxy": 1, + }, + + # Mandatory for Deadline, may be empty + "AuxFiles": [] + } + + # Enable going to rendered frames from Deadline Monitor + for index, instance in enumerate(write_instances): + path = instance.data["path"] + folder, filename = os.path.split(path) + payload["JobInfo"]["OutputDirectory%d" % index] = folder + payload["JobInfo"]["OutputFilename%d" % index] = filename + + # Include critical variables with submission + keys = [ + # TODO: This won't work if the slaves don't have accesss to + # these paths, such as if slaves are running Linux and the + # submitter is on Windows. + "PYTHONPATH", + "NUKE_PATH" + # "OFX_PLUGIN_PATH", + ] + environment = dict({key: os.environ[key] for key in keys + if key in os.environ}, **api.Session) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) + + self.log.info("Submitting..") + self.log.info(json.dumps(payload, indent=4, sort_keys=True)) + + # E.g. 
http://192.168.0.1:8082/api/jobs
+        url = "{}/api/jobs".format(AVALON_DEADLINE)
+        response = requests.post(url, json=payload)
+        if not response.ok:
+            raise Exception(response.text)
+
+        # Store the response for dependent job submission plug-ins
+        for instance in write_instances:
+            instance.data["deadlineSubmissionJob"] = response.json()
diff --git a/pype/plugins/nuke/publish/validate_nuke_settings.py b/pype/plugins/nuke/_publish_unused/validate_nuke_settings.py
similarity index 100%
rename from pype/plugins/nuke/publish/validate_nuke_settings.py
rename to pype/plugins/nuke/_publish_unused/validate_nuke_settings.py
diff --git a/pype/plugins/nuke/publish/validate_proxy_mode.py b/pype/plugins/nuke/_publish_unused/validate_proxy_mode.py
similarity index 100%
rename from pype/plugins/nuke/publish/validate_proxy_mode.py
rename to pype/plugins/nuke/_publish_unused/validate_proxy_mode.py
diff --git a/pype/plugins/nuke/publish/validate_write_nodes.py b/pype/plugins/nuke/_publish_unused/validate_write_nodes.py
similarity index 100%
rename from pype/plugins/nuke/publish/validate_write_nodes.py
rename to pype/plugins/nuke/_publish_unused/validate_write_nodes.py
diff --git a/pype/plugins/nuke/create/create_backdrop b/pype/plugins/nuke/create/create_backdrop
new file mode 100644
index 0000000000..2cdc222618
--- /dev/null
+++ b/pype/plugins/nuke/create/create_backdrop
@@ -0,0 +1,2 @@
+# creates a backdrop which is published as a separate nuke script
+# it is versioned by major version
diff --git a/pype/plugins/nuke/create/create_camera b/pype/plugins/nuke/create/create_camera
new file mode 100644
index 0000000000..0d542b8ad7
--- /dev/null
+++ b/pype/plugins/nuke/create/create_camera
@@ -0,0 +1,3 @@
+# creates a vanilla camera if no camera is selected
+# if a camera is selected it will be converted into a containerized object
+# it is major-versioned on publish
diff --git a/pype/plugins/nuke/create/create_read_plate b/pype/plugins/nuke/create/create_read_plate
new file mode 100644
index 0000000000..90a47cb55e
--- /dev/null
+++ b/pype/plugins/nuke/create/create_read_plate
@@ -0,0 +1,8 @@
+# creates a publishable read node, usually used for enabling version tracking
+# also useful for sharing across shots or assets
+
+# if read nodes are selected they are converted to containers
+# if no read node is selected it creates a read node and offers a browser into the shot resource folder
+
+# type movie > mov or imagesequence
+# type still > mattepaint .psd, .tif, .png
diff --git a/pype/plugins/nuke/create/create_write b/pype/plugins/nuke/create/create_write
new file mode 100644
index 0000000000..dcb132875a
--- /dev/null
+++ b/pype/plugins/nuke/create/create_write
@@ -0,0 +1,17 @@
+# type: render
+# if the script has no render type node yet, the first one created gets [master] in its name to mark the main script renderer
+# colorspace setting from templates
+# dataflow setting from templates
+
+# type: mask_render
+# created with a shuffle gizmo for RGB separation into a davinci matte
+# colorspace setting from templates
+# dataflow setting from templates
+
+# type: prerender
+# backdrop with write and read
+# colorspace setting from templates
+# dataflow setting from templates
+
+# type: geo
+# dataflow setting from templates
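The creator notes above repeatedly point at colorspace and dataflow settings coming from templates. Below is a minimal sketch of how a creator might look those presets up through the objects this PR preloads into pype.api; the nested Colorspace.nuke.<preset> / Dataflow.nuke.<preset> layout is an assumption for illustration only, not the real template schema.

# Hypothetical helper: fetch write-node presets from the preloaded templates.
from pype import api as pype


def get_write_presets(preset="render"):
    """Fetch assumed colorspace/dataflow presets for a write type."""
    # templates are preloaded once per session (see pype/templates.py)
    pype.load_data_from_templates()

    # the Colorspace.nuke.<preset> / Dataflow.nuke.<preset> layout is assumed
    colorspace = getattr(getattr(pype.Colorspace, "nuke", None), preset, None)
    dataflow = getattr(getattr(pype.Dataflow, "nuke", None), preset, None)

    return {
        "preset": preset,
        "colorspace": colorspace,
        "dataflow": dataflow,
    }

A creator such as the ones in create_write.py below could merge the returned values into write_data before calling create_write_node.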
diff --git a/pype/plugins/nuke/create/create_write.py b/pype/plugins/nuke/create/create_write.py
new file mode 100644
index 0000000000..c1b492ac2e
--- /dev/null
+++ b/pype/plugins/nuke/create/create_write.py
@@ -0,0 +1,149 @@
+from collections import OrderedDict
+import avalon.api
+import avalon.nuke
+from pype.nuke import (
+    create_write_node
+)
+from pype import api as pype
+
+import nuke
+
+
+log = pype.Logger.getLogger(__name__, "nuke")
+
+
+def subset_to_families(subset, family, families):
+    subset_sufx = str(subset).replace(family, "")
+    new_subset = families + subset_sufx
+    return "{}.{}".format(family, new_subset)
+
+
+class CreateWriteRender(avalon.nuke.Creator):
+    # change this to template preset
+    preset = "render"
+
+    name = "WriteRender"
+    label = "Create Write Render"
+    hosts = ["nuke"]
+    family = "{}_write".format(preset)
+    families = preset
+    icon = "sign-out"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateWriteRender, self).__init__(*args, **kwargs)
+
+        data = OrderedDict()
+
+        data["family"] = self.family.split("_")[1]
+        data["families"] = self.families
+
+        # keep any pre-filled creator data that is not set yet
+        for k, v in self.data.items():
+            if k not in data.keys():
+                data[k] = v
+        self.data = data
+
+    def process(self):
+        self.name = self.data["subset"]
+
+        family = self.family.split("_")[0]
+        node = self.family.split("_")[1]
+
+        instance = nuke.toNode(self.data["subset"])
+
+        if not instance:
+            write_data = {
+                "class": node,
+                "preset": family,
+                "avalon": self.data
+            }
+
+            create_write_node(self.data["subset"], write_data)
+
+        return
+
+
+class CreateWritePrerender(avalon.nuke.Creator):
+    # change this to template preset
+    preset = "prerender"
+
+    name = "WritePrerender"
+    label = "Create Write Prerender"
+    hosts = ["nuke"]
+    family = "{}_write".format(preset)
+    families = preset
+    icon = "sign-out"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateWritePrerender, self).__init__(*args, **kwargs)
+
+        data = OrderedDict()
+
+        data["family"] = self.family.split("_")[1]
+        data["families"] = self.families
+
+        # keep any pre-filled creator data that is not set yet
+        for k, v in self.data.items():
+            if k not in data.keys():
+                data[k] = v
+        self.data = data
+
+    def process(self):
+        self.name = self.data["subset"]
+
+        instance = nuke.toNode(self.data["subset"])
+
+        family = self.family.split("_")[0]
+        node = self.family.split("_")[1]
+
+        if not instance:
+            write_data = {
+                "class": node,
+                "preset": family,
+                "avalon": self.data
+            }
+
+            create_write_node(self.data["subset"], write_data)
+
+        return
+
+
+class CreateWriteStill(avalon.nuke.Creator):
+    # change this to template preset
+    preset = "still"
+
+    name = "WriteStill"
+    label = "Create Write Still"
+    hosts = ["nuke"]
+    family = "{}_write".format(preset)
+    families = preset
+    icon = "image"
+
+    def __init__(self, *args, **kwargs):
+        super(CreateWriteStill, self).__init__(*args, **kwargs)
+
+        data = OrderedDict()
+
+        data["family"] = self.family.split("_")[1]
+        data["families"] = self.families
+
+        # keep any pre-filled creator data that is not set yet
+        for k, v in self.data.items():
+            if k not in data.keys():
+                data[k] = v
+        self.data = data
+
+    def process(self):
+        self.name = self.data["subset"]
+
+        instance = nuke.toNode(self.data["subset"])
+
+        family = self.family.split("_")[0]
+        node = self.family.split("_")[1]
+
+        if not instance:
+            write_data = {
+                "frame_range": [nuke.frame(), nuke.frame()],
+                "class": node,
+                "preset": family,
+                "avalon": self.data
+            }
+
+            nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
+            create_write_node(self.data["subset"], write_data)
+
+        return
diff --git a/pype/plugins/nuke/create/create_write_exr.py b/pype/plugins/nuke/create/create_write_exr.py
deleted file mode 100644
index 41cd528b15..0000000000
--- a/pype/plugins/nuke/create/create_write_exr.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-import avalon.api
-import avalon.nuke
-import nuke
-
-
-class CrateWriteExr(avalon.api.Creator):
-    name = "Write_exr"
-    label = "Create Write:
exr" - hosts = ["nuke"] - family = "write" - icon = "sign-out" - - # def __init__(self, *args, **kwargs): - # super(CrateWriteExr, self).__init__(*args, **kwargs) - # self.data.setdefault("subset", "this") - - def process(self): - # nuke = getattr(sys.modules["__main__"], "nuke", None) - data = {} - ext = "exr" - - # todo: improve method of getting current environment - # todo: pref avalon.Session over os.environ - - workdir = os.path.normpath(os.environ["AVALON_WORKDIR"]) - - filename = "{}.####.exr".format(self.name) - filepath = os.path.join( - workdir, - "render", - ext, - filename - ).replace("\\", "/") - - with avalon.nuke.viewer_update_and_undo_stop(): - w = nuke.createNode( - "Write", - "name {}".format(self.name)) - # w.knob('colorspace').setValue() - w.knob('file').setValue(filepath) - w.knob('file_type').setValue(ext) - w.knob('datatype').setValue("16 bit half") - w.knob('compression').setValue("Zip (1 scanline)") - w.knob('create_directories').setValue(True) - w.knob('autocrop').setValue(True) - - return data diff --git a/pype/plugins/nuke/inventory/select_containers.py b/pype/plugins/nuke/inventory/select_containers.py index 89ac31d660..339e3a4992 100644 --- a/pype/plugins/nuke/inventory/select_containers.py +++ b/pype/plugins/nuke/inventory/select_containers.py @@ -1,7 +1,7 @@ from avalon import api -class NukeSelectContainers(api.InventoryAction): +class SelectContainers(api.InventoryAction): label = "Select Containers" icon = "mouse-pointer" diff --git a/pype/plugins/nuke/load/actions.py b/pype/plugins/nuke/load/actions.py index 94ae2999f6..f3b7748f01 100644 --- a/pype/plugins/nuke/load/actions.py +++ b/pype/plugins/nuke/load/actions.py @@ -5,7 +5,7 @@ from avalon import api -class NukeSetFrameRangeLoader(api.Loader): +class SetFrameRangeLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" families = ["animation", @@ -38,7 +38,7 @@ class NukeSetFrameRangeLoader(api.Loader): lib.update_frame_range(start, end) -class NukeSetFrameRangeWithHandlesLoader(api.Loader): +class SetFrameRangeWithHandlesLoader(api.Loader): """Specific loader of Alembic for the avalon.animation family""" families = ["animation", diff --git a/pype/plugins/nuke/load/load_alembic b/pype/plugins/nuke/load/load_alembic new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_backdrop b/pype/plugins/nuke/load/load_backdrop new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_camera_abc b/pype/plugins/nuke/load/load_camera_abc new file mode 100644 index 0000000000..e69de29bb2 diff --git a/pype/plugins/nuke/load/load_camera_nk b/pype/plugins/nuke/load/load_camera_nk new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/pype/plugins/nuke/load/load_camera_nk @@ -0,0 +1 @@ + diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py index 8d89998aa8..0b771a7007 100644 --- a/pype/plugins/nuke/load/load_sequence.py +++ b/pype/plugins/nuke/load/load_sequence.py @@ -118,7 +118,7 @@ def loader_shift(node, frame, relative=True): return int(shift) -class NukeLoadSequence(api.Loader): +class LoadSequence(api.Loader): """Load image sequence into Nuke""" families = ["imagesequence"] diff --git a/pype/plugins/nuke/load/load_still b/pype/plugins/nuke/load/load_still new file mode 100644 index 0000000000..c2aa061c5a --- /dev/null +++ b/pype/plugins/nuke/load/load_still @@ -0,0 +1 @@ +# usually used for mattepainting diff --git 
a/pype/plugins/nuke/publish/collect_current_file.py b/pype/plugins/nuke/publish/collect_current_file.py index 0d4867f08b..96ec44d9d6 100644 --- a/pype/plugins/nuke/publish/collect_current_file.py +++ b/pype/plugins/nuke/publish/collect_current_file.py @@ -1,18 +1,18 @@ import pyblish.api -class CollectCurrentFile(pyblish.api.ContextPlugin): +class SelectCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" - order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Current File" + order = pyblish.api.CollectorOrder hosts = ["nuke"] - families = ["workfile"] def process(self, context): import os import nuke current_file = nuke.root().name() + normalised = os.path.normpath(current_file) + context.data["current_file"] = normalised context.data["currentFile"] = normalised diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py new file mode 100644 index 0000000000..f1fa1276c2 --- /dev/null +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -0,0 +1,58 @@ +import os + +import nuke +import pyblish.api +from pype.nuke.lib import get_avalon_knob_data + + +@pyblish.api.log +class CollectNukeInstances(pyblish.api.ContextPlugin): + """Collect all nodes with Avalon knob.""" + + order = pyblish.api.CollectorOrder + label = "Collect Instances" + hosts = ["nuke", "nukeassist"] + + def process(self, context): + instances = [] + # creating instances per write node + for node in nuke.allNodes(): + + try: + if node["disable"].value(): + continue + except Exception: + continue + + # get data from avalon knob + avalon_knob_data = get_avalon_knob_data(node) + if not avalon_knob_data: + continue + subset = avalon_knob_data["subset"] + + # Create instance + instance = context.create_instance(subset) + instance.add(node) + + instance.data.update({ + "asset": os.environ["AVALON_ASSET"], + "label": node.name(), + "name": node.name(), + "subset": subset, + "families": [avalon_knob_data["families"]], + "family": avalon_knob_data["family"], + "publish": node.knob("publish").value() + }) + self.log.info("collected instance: {}".format(instance.data)) + instances.append(instance) + + context.data["instances"] = instances + + # Sort/grouped by family (preserving local index) + context[:] = sorted(context, key=self.sort_by_family) + + self.log.debug("context: {}".format(context)) + + def sort_by_family(self, instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py new file mode 100644 index 0000000000..db966fd84d --- /dev/null +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -0,0 +1,90 @@ +import os + +import nuke +import pyblish.api +import logging +log = logging.getLogger(__name__) + + +@pyblish.api.log +class CollectNukeWrites(pyblish.api.ContextPlugin): + """Collect all write nodes.""" + + order = pyblish.api.CollectorOrder + 0.1 + label = "Collect Writes" + hosts = ["nuke", "nukeassist"] + + def process(self, context): + for instance in context.data["instances"]: + self.log.debug("checking instance: {}".format(instance)) + node = instance[0] + + if node.Class() != "Write": + continue + + # Determine defined file type + ext = node["file_type"].value() + + # Determine output type + output_type = "img" + if ext == "mov": + output_type = "mov" + + # Get frame range + first_frame = int(nuke.root()["first_frame"].getValue()) + last_frame = int(nuke.root()["last_frame"].getValue()) 
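+ # a write node can override the script frame range with its own "first"/"last" knobs when "use_limit" is enabled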
+ + if node["use_limit"].getValue(): + first_frame = int(node["first"].getValue()) + last_frame = int(node["last"].getValue()) + + # get path + path = nuke.filename(node) + output_dir = os.path.dirname(path) + self.log.debug('output dir: {}'.format(output_dir)) + # Include start and end render frame in label + name = node.name() + + label = "{0} ({1}-{2})".format( + name, + int(first_frame), + int(last_frame) + ) + + # preredered frames + if not node["render"].value(): + families = "prerendered.frames" + collected_frames = os.listdir(output_dir) + self.log.debug("collected_frames: {}".format(label)) + if "files" not in instance.data: + instance.data["files"] = list() + instance.data["files"].append(collected_frames) + instance.data['transfer'] = False + else: + # dealing with local/farm rendering + if node["render_farm"].value(): + families = "{}.farm".format(instance.data["families"][0]) + else: + families = "{}.local".format(instance.data["families"][0]) + + self.log.debug("checking for error: {}".format(label)) + instance.data.update({ + "path": path, + "outputDir": output_dir, + "ext": ext, + "label": label, + "families": [families], + "firstFrame": first_frame, + "lastFrame": last_frame, + "outputType": output_type, + "stagingDir": output_dir, + + }) + + self.log.debug("instance.data: {}".format(instance.data)) + + self.log.debug("context: {}".format(context)) + + def sort_by_family(self, instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/extract_output_directory.py b/pype/plugins/nuke/publish/extract_output_directory.py index 3064fad3c5..36ddb35e30 100644 --- a/pype/plugins/nuke/publish/extract_output_directory.py +++ b/pype/plugins/nuke/publish/extract_output_directory.py @@ -20,7 +20,7 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin): path = instance.data["collection"].format() if "output_path" in instance.data.keys(): - path = instance.data["output_path"] + path = instance.data["path"] if not path: return diff --git a/pype/plugins/nuke/publish/extract_script_save.py b/pype/plugins/nuke/publish/extract_script_save.py deleted file mode 100644 index b0eeb47886..0000000000 --- a/pype/plugins/nuke/publish/extract_script_save.py +++ /dev/null @@ -1,15 +0,0 @@ -import nuke -import pyblish.api - - -class ExtractScriptSave(pyblish.api.InstancePlugin): - """ Saves the script before extraction. """ - - order = pyblish.api.ExtractorOrder - 0.49 - label = "Script Save" - hosts = ["nuke"] - families = ["saver"] - - def process(self, instance): - - nuke.scriptSave() diff --git a/pype/plugins/nuke/publish/integrate_rendered_frames.py b/pype/plugins/nuke/publish/integrate_rendered_frames.py new file mode 100644 index 0000000000..f482a48cda --- /dev/null +++ b/pype/plugins/nuke/publish/integrate_rendered_frames.py @@ -0,0 +1,361 @@ +import os +import logging +import shutil + +import errno +import pyblish.api +from avalon import api, io + + +log = logging.getLogger(__name__) + + +class IntegrateFrames(pyblish.api.InstancePlugin): + """Resolve any dependency issies + + This plug-in resolves any paths which, if not updated might break + the published file. + + The order of families is important, when working with lookdev you want to + first publish the texture, update the texture paths in the nodes and then + publish the shading network. Same goes for file dependent assets. 
+ """ + + label = "Integrate Frames" + order = pyblish.api.IntegratorOrder + families = ["prerendered.frames"] + + def process(self, instance): + + self.register(instance) + + self.log.info("Integrating Asset in to the database ...") + # self.integrate(instance) + + def register(self, instance): + + # Required environment variables + PROJECT = api.Session["AVALON_PROJECT"] + ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"] + LOCATION = api.Session["AVALON_LOCATION"] + + context = instance.context + # Atomicity + # + # Guarantee atomic publishes - each asset contains + # an identical set of members. + # __ + # / o + # / \ + # | o | + # \ / + # o __/ + # + assert all(result["success"] for result in context.data["results"]), ( + "Atomicity not held, aborting.") + + # Assemble + # + # | + # v + # ---> <---- + # ^ + # | + # + stagingdir = instance.data.get("stagingDir") + assert stagingdir, ("Incomplete instance \"%s\": " + "Missing reference to staging area." % instance) + + # extra check if stagingDir actually exists and is available + + self.log.debug("Establishing staging directory @ %s" % stagingdir) + + project = io.find_one({"type": "project"}, + projection={"config.template.publish": True}) + + asset = io.find_one({"type": "asset", + "name": ASSET, + "parent": project["_id"]}) + + assert all([project, asset]), ("Could not find current project or " + "asset '%s'" % ASSET) + + subset = self.get_subset(asset, instance) + + # get next version + latest_version = io.find_one({"type": "version", + "parent": subset["_id"]}, + {"name": True}, + sort=[("name", -1)]) + + next_version = 1 + if latest_version is not None: + next_version += latest_version["name"] + + self.log.info("Verifying version from assumed destination") + + assumed_data = instance.data["assumedTemplateData"] + assumed_version = assumed_data["version"] + if assumed_version != next_version: + raise AttributeError("Assumed version 'v{0:03d}' does not match" + "next version in database " + "('v{1:03d}')".format(assumed_version, + next_version)) + + self.log.debug("Next version: v{0:03d}".format(next_version)) + + version_data = self.create_version_data(context, instance) + version = self.create_version(subset=subset, + version_number=next_version, + locations=[LOCATION], + data=version_data) + + self.log.debug("Creating version ...") + version_id = io.insert_one(version).inserted_id + + # Write to disk + # _ + # | | + # _| |_ + # ____\ / + # |\ \ / \ + # \ \ v \ + # \ \________. 
+ # \|________| + # + root = api.registered_root() + # template_data = {"root": root, + # "project": PROJECT, + # "silo": asset['silo'], + # "asset": ASSET, + # "subset": subset["name"], + # "version": version["name"]} + hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents'] + if hierarchy: + # hierarchy = os.path.sep.join(hierarchy) + hierarchy = os.path.join(*hierarchy) + + template_data = {"root": root, + "project": {"name": PROJECT, + "code": "prjX"}, + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "VERSION": version["name"], + "hierarchy": hierarchy} + + template_publish = project["config"]["template"]["publish"] + anatomy = instance.context.data['anatomy'] + + # Find the representations to transfer amongst the files + # Each should be a single representation (as such, a single extension) + representations = [] + + for files in instance.data["files"]: + + # Collection + # _______ + # |______|\ + # | |\| + # | || + # | || + # | || + # |_______| + # + if isinstance(files, list): + collection = files + # Assert that each member has identical suffix + _, ext = os.path.splitext(collection[0]) + assert all(ext == os.path.splitext(name)[1] + for name in collection), ( + "Files had varying suffixes, this is a bug" + ) + + assert not any(os.path.isabs(name) for name in collection) + + template_data["representation"] = ext[1:] + + for fname in collection: + + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + dst = anatomy_filled.publish.path + + # if instance.data.get('transfer', True): + # instance.data["transfers"].append([src, dst]) + + else: + # Single file + # _______ + # | |\ + # | | + # | | + # | | + # |_______| + # + fname = files + assert not os.path.isabs(fname), ( + "Given file name is a full path" + ) + _, ext = os.path.splitext(fname) + + template_data["representation"] = ext[1:] + + src = os.path.join(stagingdir, fname) + anatomy_filled = anatomy.format(template_data) + dst = anatomy_filled.publish.path + + + # if instance.data.get('transfer', True): + # dst = src + # instance.data["transfers"].append([src, dst]) + + representation = { + "schema": "pype:representation-2.0", + "type": "representation", + "parent": version_id, + "name": ext[1:], + "data": {'path': src}, + "dependencies": instance.data.get("dependencies", "").split(), + + # Imprint shortcut to context + # for performance reasons. + "context": { + "root": root, + "project": PROJECT, + "projectcode": "prjX", + 'task': api.Session["AVALON_TASK"], + "silo": asset['silo'], + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": version["name"], + "hierarchy": hierarchy, + "representation": ext[1:] + } + } + representations.append(representation) + + self.log.info("Registering {} items".format(len(representations))) + + io.insert_many(representations) + + def integrate(self, instance): + """Move the files + + Through `instance.data["transfers"]` + + Args: + instance: the instance to integrate + """ + + transfers = instance.data["transfers"] + + for src, dest in transfers: + self.log.info("Copying file .. 
{} -> {}".format(src, dest)) + self.copy_file(src, dest) + + def copy_file(self, src, dst): + """ Copy given source to destination + + Arguments: + src (str): the source file which needs to be copied + dst (str): the destination of the sourc file + Returns: + None + """ + + dirname = os.path.dirname(dst) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + self.log.critical("An unexpected error occurred.") + raise + + shutil.copy(src, dst) + + def get_subset(self, asset, instance): + + subset = io.find_one({"type": "subset", + "parent": asset["_id"], + "name": instance.data["subset"]}) + + if subset is None: + subset_name = instance.data["subset"] + self.log.info("Subset '%s' not found, creating.." % subset_name) + + _id = io.insert_one({ + "schema": "pype:subset-2.0", + "type": "subset", + "name": subset_name, + "data": {}, + "parent": asset["_id"] + }).inserted_id + + subset = io.find_one({"_id": _id}) + + return subset + + def create_version(self, subset, version_number, locations, data=None): + """ Copy given source to destination + + Args: + subset (dict): the registered subset of the asset + version_number (int): the version number + locations (list): the currently registered locations + + Returns: + dict: collection of data to create a version + """ + # Imprint currently registered location + version_locations = [location for location in locations if + location is not None] + + return {"schema": "pype:version-2.0", + "type": "version", + "parent": subset["_id"], + "name": version_number, + "locations": version_locations, + "data": data} + + def create_version_data(self, context, instance): + """Create the data collection for the version + + Args: + context: the current context + instance: the current instance being published + + Returns: + dict: the required information with instance.data as key + """ + + families = [] + current_families = instance.data.get("families", list()) + instance_family = instance.data.get("family", None) + + if instance_family is not None: + families.append(instance_family) + families += current_families + + # create relative source path for DB + relative_path = os.path.relpath(context.data["currentFile"], + api.registered_root()) + source = os.path.join("{root}", relative_path).replace("\\", "/") + + version_data = {"families": families, + "time": context.data["time"], + "author": context.data["user"], + "source": source, + "comment": context.data.get("comment")} + + # Include optional data if present in + optionals = ["startFrame", "endFrame", "step", "handles"] + for key in optionals: + if key in instance.data: + version_data[key] = instance.data[key] + + return version_data diff --git a/pype/plugins/nuke/publish/render_local.py b/pype/plugins/nuke/publish/render_local.py new file mode 100644 index 0000000000..55adedb9e5 --- /dev/null +++ b/pype/plugins/nuke/publish/render_local.py @@ -0,0 +1,48 @@ +import pyblish.api +import nuke + + +class NukeRenderLocal(pyblish.api.InstancePlugin): + # TODO: rewrite docstring to nuke + """Render the current Fusion composition locally. + + Extract the result of savers by starting a comp render + This will run the local render of Fusion. 
+
+    """
+
+    order = pyblish.api.ExtractorOrder
+    label = "Render Local"
+    hosts = ["nuke"]
+    families = ["render.local", "prerender.local", "still.local"]
+
+    def process(self, instance):
+
+        # This should be a ContextPlugin, but this is a workaround
+        # for a bug in pyblish to run once for a family: issue #250
+        context = instance.context
+        key = "__hasRun{}".format(self.__class__.__name__)
+        if context.data.get(key, False):
+            return
+        else:
+            context.data[key] = True
+
+        self.log.debug("instance collected: {}".format(instance.data))
+
+        first_frame = instance.data.get("firstFrame", None)
+        last_frame = instance.data.get("lastFrame", None)
+        node_subset_name = instance.data.get("name", None)
+
+        self.log.info("Starting render")
+        self.log.info("Start frame: {}".format(first_frame))
+        self.log.info("End frame: {}".format(last_frame))
+
+        # Render frames
+        nuke.execute(
+            node_subset_name,
+            int(first_frame),
+            int(last_frame)
+        )
+        # switch the write node to prerendered.frames
+        instance[0]["render"].setValue(False)
+        self.log.info('Finished render')
diff --git a/pype/plugins/nuke/publish/script_save.py b/pype/plugins/nuke/publish/script_save.py
new file mode 100644
index 0000000000..472742f464
--- /dev/null
+++ b/pype/plugins/nuke/publish/script_save.py
@@ -0,0 +1,15 @@
+import nuke
+import pyblish.api
+
+
+class ExtractScriptSave(pyblish.api.Extractor):
+    """ Save the Nuke script before extraction starts.
+    """
+    label = 'Script Save'
+    order = pyblish.api.Extractor.order - 0.45
+    hosts = ['nuke']
+
+    def process(self, instance):
+
+        self.log.info('saving script')
+        nuke.scriptSave()
diff --git a/pype/plugins/nuke/publish/validate_collection.py b/pype/plugins/nuke/publish/validate_collection.py
new file mode 100644
index 0000000000..4088272bc4
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_collection.py
@@ -0,0 +1,53 @@
+import os
+import pyblish.api
+import clique
+
+
+@pyblish.api.log
+class RepairCollectionAction(pyblish.api.Action):
+    label = "Repair"
+    on = "failed"
+    icon = "wrench"
+
+    def process(self, context, plugin):
+
+        files_remove = [os.path.join(context[0].data["outputDir"], f)
+                        for f in context[0].data["files"]]
+        for f in files_remove:
+            os.remove(f)
+            self.log.debug("removing file: {}".format(f))
+        context[0][0]["render"].setValue(True)
+        self.log.info("Rendering toggled ON")
+
+
+class ValidateCollection(pyblish.api.InstancePlugin):
+    """ Validates file output. """
+
+    order = pyblish.api.ValidatorOrder
+    # optional = True
+    families = ['prerendered.frames']
+    label = "Check prerendered frames"
+    hosts = ["nuke"]
+    actions = [RepairCollectionAction]
+
+    def process(self, instance):
+        self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
+        collections, remainder = clique.assemble(*instance.data['files'])
+        self.log.info('collections: {}'.format(str(collections)))
+
+        frame_length = instance.data["lastFrame"] \
+            - instance.data["firstFrame"] + 1
+
+        if frame_length != 1:
+            assert len(collections) == 1, self.log.info(
+                "There are multiple collections in the folder")
+            assert collections[0].is_contiguous(), self.log.info(
+                "Some frames appear to be missing")
+
+        assert not remainder, self.log.info(
+            "There are some extra files in folder")
+
+        self.log.info('frame_length: {}'.format(frame_length))
+        self.log.info('len(list(instance.data["files"])): {}'.format(
+            len(list(instance.data["files"][0]))))
+
+        assert len(list(instance.data["files"][0])) == frame_length, self.log.info(
+            "{} missing frames.
Use repair to render all frames".format(__name__)) diff --git a/pype/plugins/nuke/publish/validate_prerenders_output.py b/pype/plugins/nuke/publish/validate_prerenders_output.py deleted file mode 100644 index 412c55ac0a..0000000000 --- a/pype/plugins/nuke/publish/validate_prerenders_output.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import pyblish.api - - -@pyblish.api.log -class ValidatePrerendersOutput(pyblish.api.Validator): - """Validates that the output directory for the write nodes exists""" - - families = ['write.prerender'] - hosts = ['nuke'] - label = 'Pre-renders output' - - def process(self, instance): - path = os.path.dirname(instance[0]['file'].value()) - - if 'output' not in path: - name = instance[0].name() - msg = 'Output directory for %s is not in an "output" folder.' % name - - raise ValueError(msg) diff --git a/pype/templates.py b/pype/templates.py new file mode 100644 index 0000000000..7e4b962d52 --- /dev/null +++ b/pype/templates.py @@ -0,0 +1,100 @@ +import os +import re +from avalon import io +from app.api import (Templates, Logger, format) +log = Logger.getLogger(__name__, + os.getenv("AVALON_APP", "pype-config")) + + +def load_data_from_templates(): + from . import api + if not any([ + api.Dataflow, + api.Anatomy, + api.Colorspace, + api.Metadata + ] + ): + # base = Templates() + t = Templates(type=["anatomy", "metadata", "dataflow", "colorspace"]) + api.Anatomy = t.anatomy + api.Metadata = t.metadata.format() + data = {"metadata": api.Metadata} + api.Dataflow = t.dataflow.format(data) + api.Colorspace = t.colorspace + log.info("Data from templates were Loaded...") + + +def reset_data_from_templates(): + from . import api + api.Dataflow = None + api.Anatomy = None + api.Colorspace = None + api.Metadata = None + log.info("Data from templates were Unloaded...") + + +def get_version_from_workfile(file): + pattern = re.compile(r"_v([0-9]*)") + try: + v_string = pattern.findall(file)[0] + return v_string + except IndexError: + log.error("templates:get_version_from_workfile:" + "`{}` missing version string." 
+ "Example `v004`".format(file)) + + +def get_project_code(): + return io.find_one({"type": "project"})["data"]["code"] + + +def get_project_name(): + project_name = os.getenv("AVALON_PROJECT", None) + assert project_name, log.error("missing `AVALON_PROJECT`" + "in environment variables") + return project_name + + +def get_asset(): + asset = os.getenv("AVALON_ASSET", None) + assert asset, log.error("missing `AVALON_ASSET`" + "in environment variables") + return asset + + +def get_task(): + task = os.getenv("AVALON_TASK", None) + assert task, log.error("missing `AVALON_TASK`" + "in environment variables") + return task + + +def get_hiearchy(): + hierarchy = io.find_one({ + "type": 'asset', + "name": get_asset()} + )['data']['parents'] + + if hierarchy: + # hierarchy = os.path.sep.join(hierarchy) + return os.path.join(*hierarchy) + + +def fill_avalon_workdir(): + awd = os.getenv("AVALON_WORKDIR", None) + assert awd, log.error("missing `AVALON_WORKDIR`" + "in environment variables") + if "{" not in awd: + return + + data = { + "hierarchy": get_hiearchy(), + "task": get_task(), + "asset": get_asset(), + "project": {"name": get_project_name(), + "code": get_project_code()}} + + awd_filled = os.path.normpath(format(awd, data)) + os.environ["AVALON_WORKDIR"] = awd_filled + log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled)) diff --git a/pype/vendor/backports/__init__.py b/pype/vendor/backports/__init__.py new file mode 100644 index 0000000000..69e3be50da --- /dev/null +++ b/pype/vendor/backports/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/pype/vendor/backports/configparser/__init__.py b/pype/vendor/backports/configparser/__init__.py new file mode 100644 index 0000000000..06d7a0855f --- /dev/null +++ b/pype/vendor/backports/configparser/__init__.py @@ -0,0 +1,1390 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Configuration file parser. + +A configuration file consists of sections, lead by a "[section]" header, +and followed by "name: value" entries, with continuations and such in +the style of RFC 822. + +Intrinsic defaults can be specified by passing them into the +ConfigParser constructor as a dictionary. + +class: + +ConfigParser -- responsible for parsing a list of + configuration files, and managing the parsed database. + + methods: + + __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, + delimiters=('=', ':'), comment_prefixes=('#', ';'), + inline_comment_prefixes=None, strict=True, + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=, converters=): + Create the parser. When `defaults' is given, it is initialized into the + dictionary or intrinsic defaults. The keys must be strings, the values + must be appropriate for %()s string interpolation. + + When `dict_type' is given, it will be used to create the dictionary + objects for the list of sections, for the options within a section, and + for the default values. + + When `delimiters' is given, it will be used as the set of substrings + that divide keys from values. + + When `comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in empty lines. Comments can be + indented. + + When `inline_comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in non-empty lines. + + When `strict` is True, the parser won't allow for any section or option + duplicates while reading from a single source (file, string or + dictionary). Default is True. 
+ + When `empty_lines_in_values' is False (default: True), each empty line + marks the end of an option. Otherwise, internal empty lines of + a multiline option are kept as part of the value. + + When `allow_no_value' is True (default: False), options without + values are accepted; the value presented for these is None. + + sections() + Return all the configuration section names, sans DEFAULT. + + has_section(section) + Return whether the given section exists. + + has_option(section, option) + Return whether the given option exists in the given section. + + options(section) + Return list of configuration options for the named section. + + read(filenames, encoding=None) + Read and parse the list of named configuration files, given by + name. A single filename is also allowed. Non-existing files + are ignored. Return list of successfully read files. + + read_file(f, filename=None) + Read and parse one configuration file, given as a file object. + The filename defaults to f.name; it is only used in error + messages (if f has no `name' attribute, the string `' is used). + + read_string(string) + Read configuration from a given string. + + read_dict(dictionary) + Read configuration from a dictionary. Keys are section names, + values are dictionaries with keys and values that should be present + in the section. If the used dictionary type preserves order, sections + and their keys will be added in order. Values are automatically + converted to strings. + + get(section, option, raw=False, vars=None, fallback=_UNSET) + Return a string value for the named option. All % interpolations are + expanded in the return values, based on the defaults passed into the + constructor and the DEFAULT section. Additional substitutions may be + provided using the `vars' argument, which must be a dictionary whose + contents override any pre-existing defaults. If `option' is a key in + `vars', the value from `vars' is used. + + getint(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to an integer. + + getfloat(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a float. + + getboolean(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a boolean (currently case + insensitively defined as 0, false, no, off for False, and 1, true, + yes, on for True). Returns False or True. + + items(section=_UNSET, raw=False, vars=None) + If section is given, return a list of tuples with (name, value) for + each option in the section. Otherwise, return a list of tuples with + (section_name, section_proxy) for each section, including DEFAULTSECT. + + remove_section(section) + Remove the given file section and all its options. + + remove_option(section, option) + Remove the given option from the given section. + + set(section, option, value) + Set the given option. + + write(fp, space_around_delimiters=True) + Write the configuration state in .ini format. If + `space_around_delimiters' is True (the default), delimiters + between keys and values are surrounded by spaces. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from collections import MutableMapping +import functools +import io +import itertools +import re +import sys +import warnings + +from backports.configparser.helpers import OrderedDict as _default_dict +from backports.configparser.helpers import ChainMap as _ChainMap +from backports.configparser.helpers import from_none, open, str, PY2 + +__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", + "NoOptionError", "InterpolationError", "InterpolationDepthError", + "InterpolationMissingOptionError", "InterpolationSyntaxError", + "ParsingError", "MissingSectionHeaderError", + "ConfigParser", "SafeConfigParser", "RawConfigParser", + "Interpolation", "BasicInterpolation", "ExtendedInterpolation", + "LegacyInterpolation", "SectionProxy", "ConverterMapping", + "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] + +DEFAULTSECT = "DEFAULT" + +MAX_INTERPOLATION_DEPTH = 10 + + +# exception classes +class Error(Exception): + """Base class for ConfigParser exceptions.""" + + def __init__(self, msg=''): + self.message = msg + Exception.__init__(self, msg) + + def __repr__(self): + return self.message + + __str__ = __repr__ + + +class NoSectionError(Error): + """Raised when no section matches a requested option.""" + + def __init__(self, section): + Error.__init__(self, 'No section: %r' % (section,)) + self.section = section + self.args = (section, ) + + +class DuplicateSectionError(Error): + """Raised when a section is repeated in an input source. + + Possible repetitions that raise this exception are: multiple creation + using the API or in strict parsers when a section is found more than once + in a single input file, string or dictionary. + """ + + def __init__(self, section, source=None, lineno=None): + msg = [repr(section), " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": section ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Section ") + Error.__init__(self, "".join(msg)) + self.section = section + self.source = source + self.lineno = lineno + self.args = (section, source, lineno) + + +class DuplicateOptionError(Error): + """Raised by strict parsers when an option is repeated in an input source. + + Current implementation raises this exception only when an option is found + more than once in a single file, string or dictionary. 
+ """ + + def __init__(self, section, option, source=None, lineno=None): + msg = [repr(option), " in section ", repr(section), + " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": option ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Option ") + Error.__init__(self, "".join(msg)) + self.section = section + self.option = option + self.source = source + self.lineno = lineno + self.args = (section, option, source, lineno) + + +class NoOptionError(Error): + """A requested option was not found.""" + + def __init__(self, option, section): + Error.__init__(self, "No option %r in section: %r" % + (option, section)) + self.option = option + self.section = section + self.args = (option, section) + + +class InterpolationError(Error): + """Base class for interpolation-related exceptions.""" + + def __init__(self, option, section, msg): + Error.__init__(self, msg) + self.option = option + self.section = section + self.args = (option, section, msg) + + +class InterpolationMissingOptionError(InterpolationError): + """A string substitution required a setting which was not available.""" + + def __init__(self, option, section, rawval, reference): + msg = ("Bad value substitution: option {0!r} in section {1!r} contains " + "an interpolation key {2!r} which is not a valid option name. " + "Raw value: {3!r}".format(option, section, reference, rawval)) + InterpolationError.__init__(self, option, section, msg) + self.reference = reference + self.args = (option, section, rawval, reference) + + +class InterpolationSyntaxError(InterpolationError): + """Raised when the source text contains invalid syntax. + + Current implementation raises this exception when the source text into + which substitutions are made does not conform to the required syntax. + """ + + +class InterpolationDepthError(InterpolationError): + """Raised when substitutions are nested too deeply.""" + + def __init__(self, option, section, rawval): + msg = ("Recursion limit exceeded in value substitution: option {0!r} " + "in section {1!r} contains an interpolation key which " + "cannot be substituted in {2} steps. Raw value: {3!r}" + "".format(option, section, MAX_INTERPOLATION_DEPTH, + rawval)) + InterpolationError.__init__(self, option, section, msg) + self.args = (option, section, rawval) + + +class ParsingError(Error): + """Raised when a configuration file does not follow legal syntax.""" + + def __init__(self, source=None, filename=None): + # Exactly one of `source'/`filename' arguments has to be given. + # `filename' kept for compatibility. + if filename and source: + raise ValueError("Cannot specify both `filename' and `source'. " + "Use `source'.") + elif not filename and not source: + raise ValueError("Required argument `source' not given.") + elif filename: + source = filename + Error.__init__(self, 'Source contains parsing errors: %r' % source) + self.source = source + self.errors = [] + self.args = (source, ) + + @property + def filename(self): + """Deprecated, use `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. " + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + return self.source + + @filename.setter + def filename(self, value): + """Deprecated, user `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. 
" + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + self.source = value + + def append(self, lineno, line): + self.errors.append((lineno, line)) + self.message += '\n\t[line %2d]: %s' % (lineno, line) + + +class MissingSectionHeaderError(ParsingError): + """Raised when a key-value pair is found before any section header.""" + + def __init__(self, filename, lineno, line): + Error.__init__( + self, + 'File contains no section headers.\nfile: %r, line: %d\n%r' % + (filename, lineno, line)) + self.source = filename + self.lineno = lineno + self.line = line + self.args = (filename, lineno, line) + + +# Used in parser getters to indicate the default behaviour when a specific +# option is not found it to raise an exception. Created to enable `None' as +# a valid fallback value. +_UNSET = object() + + +class Interpolation(object): + """Dummy interpolation that passes the value through with no changes.""" + + def before_get(self, parser, section, option, value, defaults): + return value + + def before_set(self, parser, section, option, value): + return value + + def before_read(self, parser, section, option, value): + return value + + def before_write(self, parser, section, option, value): + return value + + +class BasicInterpolation(Interpolation): + """Interpolation as implemented in the classic ConfigParser. + + The option values can contain format strings which refer to other values in + the same section, or values in the special default section. + + For example: + + something: %(dir)s/whatever + + would resolve the "%(dir)s" to the value of dir. All reference + expansions are done late, on demand. If a user needs to use a bare % in + a configuration file, she can escape it by writing %%. Other % usage + is considered a user error and raises `InterpolationSyntaxError'.""" + + _KEYCRE = re.compile(r"%\(([^)]+)\)s") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('%%', '') # escaped percent signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '%' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('%'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("%") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "%": + accum.append("%") + rest = rest[2:] + elif c == "(": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + var = parser.optionxform(m.group(1)) + rest = rest[m.end():] + try: + v = map[var] + except KeyError: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, var)) + if "%" in v: + self._interpolate_some(parser, option, accum, v, + section, map, depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'%%' must be followed by '%%' or '(', " + "found: %r" % (rest,)) + + +class ExtendedInterpolation(Interpolation): + """Advanced variant of interpolation, supports the syntax used by + 
`zc.buildout'. Enables interpolation between sections.""" + + _KEYCRE = re.compile(r"\$\{([^}]+)\}") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('$$', '') # escaped dollar signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '$' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('$'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("$") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "$": + accum.append("$") + rest = rest[2:] + elif c == "{": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + path = m.group(1).split(':') + rest = rest[m.end():] + sect = section + opt = option + try: + if len(path) == 1: + opt = parser.optionxform(path[0]) + v = map[opt] + elif len(path) == 2: + sect = path[0] + opt = parser.optionxform(path[1]) + v = parser.get(sect, opt, raw=True) + else: + raise InterpolationSyntaxError( + option, section, + "More than one ':' found: %r" % (rest,)) + except (KeyError, NoSectionError, NoOptionError): + raise from_none(InterpolationMissingOptionError( + option, section, rawval, ":".join(path))) + if "$" in v: + self._interpolate_some(parser, opt, accum, v, sect, + dict(parser.items(sect, raw=True)), + depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'$' must be followed by '$' or '{', " + "found: %r" % (rest,)) + + +class LegacyInterpolation(Interpolation): + """Deprecated interpolation used in old versions of ConfigParser. + Use BasicInterpolation or ExtendedInterpolation instead.""" + + _KEYCRE = re.compile(r"%\(([^)]*)\)s|.") + + def before_get(self, parser, section, option, value, vars): + rawval = value + depth = MAX_INTERPOLATION_DEPTH + while depth: # Loop through this until it's done + depth -= 1 + if value and "%(" in value: + replace = functools.partial(self._interpolation_replace, + parser=parser) + value = self._KEYCRE.sub(replace, value) + try: + value = value % vars + except KeyError as e: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, e.args[0])) + else: + break + if value and "%(" in value: + raise InterpolationDepthError(option, section, rawval) + return value + + def before_set(self, parser, section, option, value): + return value + + @staticmethod + def _interpolation_replace(match, parser): + s = match.group(1) + if s is None: + return match.group() + else: + return "%%(%s)s" % parser.optionxform(s) + + +class RawConfigParser(MutableMapping): + """ConfigParser that does not do interpolation.""" + + # Regular expressions for parsing section headers and options + _SECT_TMPL = r""" + \[ # [ + (?P
<header>[^]]+) # very permissive! + \] # ] + """ + _OPT_TMPL = r""" + (?P