diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py
new file mode 100644
index 0000000000..9d0ce2071f
--- /dev/null
+++ b/pype/ftrack/actions/action_cust_attr_doctor.py
@@ -0,0 +1,334 @@
+import os
+import sys
+import json
+import argparse
+import logging
+
+from pype.vendor import ftrack_api
+from pype.ftrack import BaseAction
+
+
+class CustomAttributeDoctor(BaseAction):
+ #: Action identifier.
+ identifier = 'custom.attributes.doctor'
+ #: Action label.
+ label = 'Custom Attributes Doctor'
+ #: Action description.
+ description = (
+ 'Fix hierarchical custom attributes, mainly handles, fstart'
+ ' and fend'
+ )
+
+ icon = '{}/ftrack/action_icons/TestAction.svg'.format(
+ os.environ.get('PYPE_STATICS_SERVER', '')
+ )
+ hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend']
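+ # Maps a new hierarchical attribute to the legacy attribute whose
+ # values it inherits (both handle attributes were stored in 'handles').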
+ hierarchical_alternatives = {
+ 'handle_start': 'handles',
+ 'handle_end': 'handles'
+ }
+
+ # Roles for new custom attributes
+ read_roles = ['ALL',]
+ write_roles = ['ALL',]
+
+ data_ca = {
+ 'handle_start': {
+ 'label': 'Frame handles start',
+ 'type': 'number',
+ 'config': json.dumps({'isdecimal': False})
+ },
+ 'handle_end': {
+ 'label': 'Frame handles end',
+ 'type': 'number',
+ 'config': json.dumps({'isdecimal': False})
+ },
+ 'fstart': {
+ 'label': 'Frame start',
+ 'type': 'number',
+ 'config': json.dumps({'isdecimal': False})
+ },
+ 'fend': {
+ 'label': 'Frame end',
+ 'type': 'number',
+ 'config': json.dumps({'isdecimal': False})
+ }
+ }
+
+ def discover(self, session, entities, event):
+ ''' Validation '''
+
+ return True
+
+ def interface(self, session, entities, event):
+ if event['data'].get('values', {}):
+ return
+
+ title = 'Select Project to fix Custom attributes'
+
+ items = []
+ item_splitter = {'type': 'label', 'value': '---'}
+
+ all_projects = session.query('Project').all()
+ for project in all_projects:
+ item_label = {
+ 'type': 'label',
+ 'value': '{} ({})'.format(
+ project['full_name'], project['name']
+ )
+ }
+ item = {
+ 'name': project['id'],
+ 'type': 'boolean',
+ 'value': False
+ }
+ if len(items) > 0:
+ items.append(item_splitter)
+ items.append(item_label)
+ items.append(item)
+
+ if len(items) == 0:
+ return {
+ 'success': False,
+ 'message': 'Didn\'t find any projects'
+ }
+ else:
+ return {
+ 'items': items,
+ 'title': title
+ }
+
+ def launch(self, session, entities, event):
+ if 'values' not in event['data']:
+ return
+
+ values = event['data']['values']
+ projects_to_update = []
+ for project_id, update_bool in values.items():
+ if not update_bool:
+ continue
+
+ project = session.query(
+ 'Project where id is "{}"'.format(project_id)
+ ).one()
+ projects_to_update.append(project)
+
+ if not projects_to_update:
+ self.log.debug('Nothing to update')
+ return {
+ 'success': True,
+ 'message': 'Nothing to update'
+ }
+
+ self.security_roles = {}
+ self.to_process = {}
+ # self.curent_default_values = {}
+ existing_attrs = session.query('CustomAttributeConfiguration').all()
+ self.prepare_custom_attributes(existing_attrs)
+
+ self.projects_data = {}
+ for project in projects_to_update:
+ self.process_data(project)
+
+ return True
+
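+ # Recursively copy each entity's value from the source (old or
+ # alternative) attribute onto the new hierarchical attribute.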
+ def process_data(self, entity):
+ cust_attrs = entity.get('custom_attributes')
+ if not cust_attrs:
+ return
+ for dst_key, src_key in self.to_process.items():
+ if src_key in cust_attrs:
+ value = cust_attrs[src_key]
+ entity['custom_attributes'][dst_key] = value
+ self.session.commit()
+
+ for child in entity.get('children', []):
+ self.process_data(child)
+
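+ # For every attribute to fix: rename an existing non-hierarchical
+ # attribute to '<key>_old' (or reuse an alternative such as 'handles'),
+ # then create a new hierarchical attribute and remember the mapping in
+ # self.to_process so values can be copied over.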
+ def prepare_custom_attributes(self, existing_attrs):
+ to_process = {}
+ to_create = []
+ all_keys = {attr['key']: attr for attr in existing_attrs}
+ for key in self.hierarchical_ca:
+ if key not in all_keys:
+ self.log.debug(
+ 'Custom attribute "{}" does not exist at all'.format(key)
+ )
+ to_create.append(key)
+ if key in self.hierarchical_alternatives:
+ alt_key = self.hierarchical_alternatives[key]
+ if alt_key in all_keys:
+ self.log.debug((
+ 'Custom attribute "{}" will use values from "{}"'
+ ).format(key, alt_key))
+
+ to_process[key] = alt_key
+
+ obj = all_keys[alt_key]
+ # if alt_key not in self.curent_default_values:
+ # self.curent_default_values[alt_key] = obj['default']
+ obj['default'] = None
+ self.session.commit()
+
+ else:
+ obj = all_keys[key]
+ new_key = key + '_old'
+
+ if obj['is_hierarchical']:
+ if new_key not in all_keys:
+ self.log.info((
+ 'Custom attribute "{}" is already hierarchical'
+ ' and no old attribute was found'
+ ).format(key)
+ )
+ continue
+
+ to_process[key] = new_key
+ continue
+
+ # default_value = obj['default']
+ # if new_key not in self.curent_default_values:
+ # self.curent_default_values[new_key] = default_value
+
+ obj['key'] = new_key
+ obj['label'] = obj['label'] + ' (old)'
+ obj['default'] = None
+
+ self.session.commit()
+
+ to_create.append(key)
+ to_process[key] = new_key
+
+ self.to_process = to_process
+ for key in to_create:
+ data = {
+ 'key': key,
+ 'entity_type': 'show',
+ 'is_hierarchical': True,
+ 'default': None
+ }
+ for _key, _value in self.data_ca.get(key, {}).items():
+ if _key == 'type':
+ _value = self.session.query((
+ 'CustomAttributeType where name is "{}"'
+ ).format(_value)).first()
+
+ data[_key] = _value
+
+ avalon_group = self.session.query(
+ 'CustomAttributeGroup where name is "avalon"'
+ ).first()
+ if avalon_group:
+ data['group'] = avalon_group
+
+ read_roles = self.get_security_role(self.read_roles)
+ write_roles = self.get_security_role(self.write_roles)
+ data['read_security_roles'] = read_roles
+ data['write_security_roles'] = write_roles
+
+ self.session.create('CustomAttributeConfiguration', data)
+ self.session.commit()
+
+ # def return_back_defaults(self):
+ # existing_attrs = self.session.query(
+ # 'CustomAttributeConfiguration'
+ # ).all()
+ #
+ # for attr_key, default in self.curent_default_values.items():
+ # for attr in existing_attrs:
+ # if attr['key'] != attr_key:
+ # continue
+ # attr['default'] = default
+ # self.session.commit()
+ # break
+
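+ # Resolve role names to SecurityRole entities. Supports the special
+ # value 'ALL' and an 'except' prefix followed by roles to exclude.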
+ def get_security_role(self, security_roles):
+ roles = []
+ if len(security_roles) == 0 or security_roles[0] == 'ALL':
+ roles = self.get_role_ALL()
+ elif security_roles[0] == 'except':
+ excepts = security_roles[1:]
+ all_roles = self.get_role_ALL()
+ for role in all_roles:
+ if role['name'] not in excepts:
+ roles.append(role)
+ if role['name'] not in self.security_roles:
+ self.security_roles[role['name']] = role
+ else:
+ for role_name in security_roles:
+ if role_name in self.security_roles:
+ roles.append(self.security_roles[role_name])
+ continue
+
+ try:
+ query = 'SecurityRole where name is "{}"'.format(role_name)
+ role = self.session.query(query).one()
+ self.security_roles[role_name] = role
+ roles.append(role)
+ except Exception:
+ self.log.warning(
+ 'Security role "{}" does not exist'.format(role_name)
+ )
+ continue
+
+ return roles
+
+ def get_role_ALL(self):
+ role_name = 'ALL'
+ if role_name in self.security_roles:
+ all_roles = self.security_roles[role_name]
+ else:
+ all_roles = self.session.query('SecurityRole').all()
+ self.security_roles[role_name] = all_roles
+ for role in all_roles:
+ if role['name'] not in self.security_roles:
+ self.security_roles[role['name']] = role
+ return all_roles
+
+
+def register(session, **kw):
+ '''Register plugin. Called when used as a plugin.'''
+
+ if not isinstance(session, ftrack_api.session.Session):
+ return
+
+ CustomAttributeDoctor(session).register()
+
+
+def main(arguments=None):
+ '''Set up logging and register action.'''
+ if arguments is None:
+ arguments = []
+
+ parser = argparse.ArgumentParser()
+ # Allow setting of logging level from arguments.
+ loggingLevels = {}
+ for level in (
+ logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
+ logging.ERROR, logging.CRITICAL
+ ):
+ loggingLevels[logging.getLevelName(level).lower()] = level
+
+ parser.add_argument(
+ '-v', '--verbosity',
+ help='Set the logging output verbosity.',
+ choices=loggingLevels.keys(),
+ default='info'
+ )
+ namespace = parser.parse_args(arguments)
+
+ # Set up basic logging
+ logging.basicConfig(level=loggingLevels[namespace.verbosity])
+
+ session = ftrack_api.Session()
+ register(session)
+
+ # Wait for events
+ logging.info(
+ 'Registered actions and listening for events. Use Ctrl-C to abort.'
+ )
+ session.event_hub.wait()
+
+
+if __name__ == '__main__':
+ raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/actions/action_sync_hier_attrs.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py
similarity index 58%
rename from pype/ftrack/actions/action_sync_hier_attrs.py
rename to pype/ftrack/actions/action_sync_hier_attrs_local.py
index 3a884a017f..c6b12028bc 100644
--- a/pype/ftrack/actions/action_sync_hier_attrs.py
+++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py
@@ -1,5 +1,6 @@
import os
import sys
+import json
import argparse
import logging
import collections
@@ -16,13 +17,13 @@ class SyncHierarchicalAttrs(BaseAction):
ca_mongoid = lib.get_ca_mongoid()
#: Action identifier.
- identifier = 'sync.hierarchical.attrs'
+ identifier = 'sync.hierarchical.attrs.local'
#: Action label.
- label = 'Sync hierarchical attributes'
+ label = 'Sync HierAttrs - Local'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
- icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
+ icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
@@ -40,67 +41,92 @@ class SyncHierarchicalAttrs(BaseAction):
return False
def launch(self, session, entities, event):
- # Collect hierarchical attrs
- custom_attributes = {}
- all_avalon_attr = session.query(
- 'CustomAttributeGroup where name is "avalon"'
+ user = session.query(
+ 'User where id is "{}"'.format(event['source']['user']['id'])
).one()
- for cust_attr in all_avalon_attr['custom_attribute_configurations']:
- if 'avalon_' in cust_attr['key']:
- continue
- if not cust_attr['is_hierarchical']:
- continue
+ job = session.create('Job', {
+ 'user': user,
+ 'status': 'running',
+ 'data': json.dumps({
+ 'description': 'Sync Hierarchical attributes'
+ })
+ })
+ session.commit()
- if cust_attr['default']:
- self.log.warning((
- 'Custom attribute "{}" has set default value.'
- ' This attribute can\'t be synchronized'
- ).format(cust_attr['label']))
- continue
-
- custom_attributes[cust_attr['key']] = cust_attr
-
- if not custom_attributes:
- msg = 'No hierarchical attributes to sync.'
- self.log.debug(msg)
- return {
- 'success': True,
- 'message': msg
- }
-
- entity = entities[0]
- if entity.entity_type.lower() == 'project':
- project_name = entity['full_name']
- else:
- project_name = entity['project']['full_name']
-
- self.db_con.install()
- self.db_con.Session['AVALON_PROJECT'] = project_name
-
- for entity in entities:
- for key in custom_attributes:
- # check if entity has that attribute
- if key not in entity['custom_attributes']:
- self.log.debug(
- 'Hierachical attribute "{}" not found on "{}"'.format(
- key, entity.get('name', entity)
- )
- )
+ try:
+ # Collect hierarchical attrs
+ custom_attributes = {}
+ all_avalon_attr = session.query(
+ 'CustomAttributeGroup where name is "avalon"'
+ ).one()
+ for cust_attr in all_avalon_attr['custom_attribute_configurations']:
+ if 'avalon_' in cust_attr['key']:
continue
- value = self.get_hierarchical_value(key, entity)
- if value is None:
- self.log.warning(
- 'Hierarchical attribute "{}" not set on "{}"'.format(
- key, entity.get('name', entity)
- )
- )
+ if not cust_attr['is_hierarchical']:
continue
- self.update_hierarchical_attribute(entity, key, value)
+ if cust_attr['default']:
+ self.log.warning((
+ 'Custom attribute "{}" has set default value.'
+ ' This attribute can\'t be synchronized'
+ ).format(cust_attr['label']))
+ continue
- self.db_con.uninstall()
+ custom_attributes[cust_attr['key']] = cust_attr
+
+ if not custom_attributes:
+ msg = 'No hierarchical attributes to sync.'
+ self.log.debug(msg)
+ return {
+ 'success': True,
+ 'message': msg
+ }
+
+ entity = entities[0]
+ if entity.entity_type.lower() == 'project':
+ project_name = entity['full_name']
+ else:
+ project_name = entity['project']['full_name']
+
+ self.db_con.install()
+ self.db_con.Session['AVALON_PROJECT'] = project_name
+
+ for entity in entities:
+ for key in custom_attributes:
+ # check if entity has that attribute
+ if key not in entity['custom_attributes']:
+ self.log.debug(
+ 'Hierarchical attribute "{}" not found on "{}"'.format(
+ key, entity.get('name', entity)
+ )
+ )
+ continue
+
+ value = self.get_hierarchical_value(key, entity)
+ if value is None:
+ self.log.warning(
+ 'Hierarchical attribute "{}" not set on "{}"'.format(
+ key, entity.get('name', entity)
+ )
+ )
+ continue
+
+ self.update_hierarchical_attribute(entity, key, value)
+
+ except Exception:
+ self.log.error(
+ 'Action "{}" failed'.format(self.label),
+ exc_info=True
+ )
+
+ finally:
+ self.db_con.uninstall()
+
+ if job['status'] in ('queued', 'running'):
+ job['status'] = 'failed'
+ session.commit()
return True
diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py
index bed28e1bef..34070c7e1f 100644
--- a/pype/ftrack/actions/action_sync_to_avalon_local.py
+++ b/pype/ftrack/actions/action_sync_to_avalon_local.py
@@ -6,6 +6,7 @@ import json
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib as ftracklib
+from pype.vendor.ftrack_api import session as fa_session
class SyncToAvalon(BaseAction):
@@ -176,6 +177,18 @@ class SyncToAvalon(BaseAction):
job['status'] = 'failed'
session.commit()
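+ # Chain the local hierarchical-attribute sync action on the same
+ # selection.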
+ event = fa_session.ftrack_api.event.base.Event(
+ topic='ftrack.action.launch',
+ data=dict(
+ actionIdentifier='sync.hierarchical.attrs.local',
+ selection=event['data']['selection']
+ ),
+ source=dict(
+ user=event['source']['user']
+ )
+ )
+ session.event_hub.publish(event, on_error='ignore')
+
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {
diff --git a/pype/ftrack/events/action_sync_hier_attrs.py b/pype/ftrack/events/action_sync_hier_attrs.py
new file mode 100644
index 0000000000..7fa024edf4
--- /dev/null
+++ b/pype/ftrack/events/action_sync_hier_attrs.py
@@ -0,0 +1,274 @@
+import os
+import sys
+import json
+import argparse
+import logging
+import collections
+
+from pypeapp import config
+from pype.vendor import ftrack_api
+from pype.ftrack import BaseAction, lib
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from bson.objectid import ObjectId
+
+
+class SyncHierarchicalAttrs(BaseAction):
+
+ db_con = DbConnector()
+ ca_mongoid = lib.get_ca_mongoid()
+
+ #: Action identifier.
+ identifier = 'sync.hierarchical.attrs'
+ #: Action label.
+ label = 'Sync HierAttrs'
+ #: Action description.
+ description = 'Synchronize hierarchical attributes'
+ #: Icon
+ icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
+ os.environ.get(
+ 'PYPE_STATICS_SERVER',
+ 'http://localhost:{}'.format(
+ config.get_presets().get('services', {}).get(
+ 'statics_server', {}
+ ).get('default_port', 8021)
+ )
+ )
+ )
+
+ def register(self):
+ self.session.event_hub.subscribe(
+ 'topic=ftrack.action.discover',
+ self._discover
+ )
+
+ self.session.event_hub.subscribe(
+ 'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
+ self.identifier
+ ),
+ self._launch
+ )
+
+ def discover(self, session, entities, event):
+ ''' Validation '''
+ role_check = False
+ discover = False
+ role_list = ['Pypeclub', 'Administrator', 'Project Manager']
+ user = session.query(
+ 'User where id is "{}"'.format(event['source']['user']['id'])
+ ).one()
+
+ for role in user['user_security_roles']:
+ if role['security_role']['name'] in role_list:
+ role_check = True
+ break
+ if role_check is True:
+ for entity in entities:
+ context_type = entity.get('context_type', '').lower()
+ if (
+ context_type in ('show', 'task') and
+ entity.entity_type.lower() != 'task'
+ ):
+ discover = True
+ break
+
+ return discover
+
+ def launch(self, session, entities, event):
+ user = session.query(
+ 'User where id is "{}"'.format(event['source']['user']['id'])
+ ).one()
+
+ job = session.create('Job', {
+ 'user': user,
+ 'status': 'running',
+ 'data': json.dumps({
+ 'description': 'Sync Hierarchical attributes'
+ })
+ })
+ session.commit()
+
+ try:
+ # Collect hierarchical attrs
+ custom_attributes = {}
+ all_avalon_attr = session.query(
+ 'CustomAttributeGroup where name is "avalon"'
+ ).one()
+ for cust_attr in all_avalon_attr['custom_attribute_configurations']:
+ if 'avalon_' in cust_attr['key']:
+ continue
+
+ if not cust_attr['is_hierarchical']:
+ continue
+
+ if cust_attr['default']:
+ self.log.warning((
+ 'Custom attribute "{}" has set default value.'
+ ' This attribute can\'t be synchronized'
+ ).format(cust_attr['label']))
+ continue
+
+ custom_attributes[cust_attr['key']] = cust_attr
+
+ if not custom_attributes:
+ msg = 'No hierarchical attributes to sync.'
+ self.log.debug(msg)
+ return {
+ 'success': True,
+ 'message': msg
+ }
+
+ entity = entities[0]
+ if entity.entity_type.lower() == 'project':
+ project_name = entity['full_name']
+ else:
+ project_name = entity['project']['full_name']
+
+ self.db_con.install()
+ self.db_con.Session['AVALON_PROJECT'] = project_name
+
+ for entity in entities:
+ for key in custom_attributes:
+ # check if entity has that attribute
+ if key not in entity['custom_attributes']:
+ self.log.debug(
+ 'Hierarchical attribute "{}" not found on "{}"'.format(
+ key, entity.get('name', entity)
+ )
+ )
+ continue
+
+ value = self.get_hierarchical_value(key, entity)
+ if value is None:
+ self.log.warning(
+ 'Hierarchical attribute "{}" not set on "{}"'.format(
+ key, entity.get('name', entity)
+ )
+ )
+ continue
+
+ self.update_hierarchical_attribute(entity, key, value)
+
+ except Exception:
+ self.log.error(
+ 'Action "{}" failed'.format(self.label),
+ exc_info=True
+ )
+
+ finally:
+ self.db_con.uninstall()
+
+ if job['status'] in ('queued', 'running'):
+ job['status'] = 'failed'
+ session.commit()
+
+ return True
+
+ def get_hierarchical_value(self, key, entity):
+ value = entity['custom_attributes'][key]
+ if (
+ value is not None or
+ entity.entity_type.lower() == 'project'
+ ):
+ return value
+
+ return self.get_hierarchical_value(key, entity['parent'])
+
+ def update_hierarchical_attribute(self, entity, key, value):
+ if (
+ entity['context_type'].lower() not in ('show', 'task') or
+ entity.entity_type.lower() == 'task'
+ ):
+ return
+ # collect entity's custom attributes
+ custom_attributes = entity.get('custom_attributes')
+ if not custom_attributes:
+ return
+
+ mongoid = custom_attributes.get(self.ca_mongoid)
+ if not mongoid:
+ self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
+ entity.get('name', entity)
+ ))
+ return
+
+ try:
+ mongoid = ObjectId(mongoid)
+ except Exception:
+ self.log.warning('Entity "{}" has stored invalid MongoID.'.format(
+ entity.get('name', entity)
+ ))
+ return
+ # Find entity in Mongo DB
+ mongo_entity = self.db_con.find_one({'_id': mongoid})
+ if not mongo_entity:
+ self.log.warning(
+ 'Entity "{}" is not synchronized to avalon.'.format(
+ entity.get('name', entity)
+ )
+ )
+ return
+
+ # Change value if entity has set its own
+ entity_value = custom_attributes[key]
+ if entity_value is not None:
+ value = entity_value
+
+ data = mongo_entity.get('data') or {}
+
+ data[key] = value
+ self.db_con.update_many(
+ {'_id': mongoid},
+ {'$set': {'data': data}}
+ )
+
+ for child in entity.get('children', []):
+ self.update_hierarchical_attribute(child, key, value)
+
+
+def register(session, **kw):
+ '''Register plugin. Called when used as a plugin.'''
+
+ if not isinstance(session, ftrack_api.session.Session):
+ return
+
+ SyncHierarchicalAttrs(session).register()
+
+
+def main(arguments=None):
+ '''Set up logging and register action.'''
+ if arguments is None:
+ arguments = []
+
+ parser = argparse.ArgumentParser()
+ # Allow setting of logging level from arguments.
+ loggingLevels = {}
+ for level in (
+ logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
+ logging.ERROR, logging.CRITICAL
+ ):
+ loggingLevels[logging.getLevelName(level).lower()] = level
+
+ parser.add_argument(
+ '-v', '--verbosity',
+ help='Set the logging output verbosity.',
+ choices=loggingLevels.keys(),
+ default='info'
+ )
+ namespace = parser.parse_args(arguments)
+
+ # Set up basic logging
+ logging.basicConfig(level=loggingLevels[namespace.verbosity])
+
+ session = ftrack_api.Session()
+ register(session)
+
+ # Wait for events
+ logging.info(
+ 'Registered actions and listening for events. Use Ctrl-C to abort.'
+ )
+ session.event_hub.wait()
+
+
+if __name__ == '__main__':
+ raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py
index f1a5b37f36..e78b209fac 100644
--- a/pype/ftrack/events/action_sync_to_avalon.py
+++ b/pype/ftrack/events/action_sync_to_avalon.py
@@ -3,8 +3,11 @@ import sys
import argparse
import logging
import json
+
+from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
+from pype.vendor.ftrack_api import session as fa_session
class Sync_To_Avalon(BaseAction):
@@ -50,7 +53,14 @@ class Sync_To_Avalon(BaseAction):
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
+ os.environ.get(
+ 'PYPE_STATICS_SERVER',
+ 'http://localhost:{}'.format(
+ config.get_presets().get('services', {}).get(
+ 'statics_server', {}
+ ).get('default_port', 8021)
+ )
+ )
)
def register(self):
@@ -70,7 +80,7 @@ class Sync_To_Avalon(BaseAction):
''' Validation '''
roleCheck = False
discover = False
- roleList = ['Administrator', 'Project Manager']
+ roleList = ['Pypeclub', 'Administrator', 'Project Manager']
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
@@ -191,6 +201,24 @@ class Sync_To_Avalon(BaseAction):
' - Please check Log for more information'
)
+ finally:
+ if job['status'] in ['queued', 'running']:
+ job['status'] = 'failed'
+
+ session.commit()
+
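+ # Chain the hierarchical-attribute sync action on the same selection.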
+ event = fa_session.ftrack_api.event.base.Event(
+ topic='ftrack.action.launch',
+ data=dict(
+ actionIdentifier='sync.hierarchical.attrs',
+ selection=event['data']['selection']
+ ),
+ source=dict(
+ user=event['source']['user']
+ )
+ )
+ session.event_hub.publish(event, on_error='ignore')
+
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {
diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
new file mode 100644
index 0000000000..0bb7f21590
--- /dev/null
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -0,0 +1,239 @@
+from pype.vendor import ftrack_api
+from pype.ftrack import BaseEvent, lib
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from bson.objectid import ObjectId
+from pypeapp import config
+from pypeapp import Anatomy
+import subprocess
+import os
+import re
+
+
+class UserAssigmentEvent(BaseEvent):
+ """
+ This script will intercept user assigment / de-assigment event and
+ run shell script, providing as much context as possible.
+
+ It expects a configuration file ``presets/ftrack/user_assigment_event.json``.
+ In it, you define paths to scripts to be run on user assignment and
+ on user de-assignment::
+ {
+ "add": [
+ "/path/to/script1",
+ "/path/to/script2"
+ ],
+ "remove": [
+ "/path/to/script3",
+ "/path/to/script4"
+ ]
+ }
+
+ Those scripts are executed in a shell. Three arguments are passed
+ to them:
+ 1) username of the user (de)assigned
+ 2) path to workfiles of the task the user was (de)assigned to
+ 3) path to publish files of the task the user was (de)assigned to
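+
+ Example invocation (hypothetical script and paths)::
+
+ /path/to/script1 jdoe /projects/ep01/work/sh010/anim /projects/ep01/publish/sh010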
+ """
+
+ db_con = DbConnector()
+ ca_mongoid = lib.get_ca_mongoid()
+
+ def error(self, *err):
+ for e in err:
+ self.log.error(e)
+
+ def _run_script(self, script, args):
+ """
+ Run shell script with arguments as subprocess
+
+ :param script: script path
+ :type script: str
+ :param args: list of arguments passed to script
+ :type args: list
+ :returns: return code
+ :rtype: int
+ """
+ # pass the script and its arguments as a single argv list
+ p = subprocess.call([script] + args)
+ return p
+
+ def _get_task_and_user(self, session, action, changes):
+ """
+ Get Task and User entities from Ftrack session
+
+ :param session: ftrack session
+ :type session: ftrack_api.session
+ :param action: event action
+ :type action: str
+ :param changes: what was changed by event
+ :type changes: dict
+ :returns: User and Task entities
+ :rtype: tuple
+ """
+ if not changes:
+ return None, None
+
+ if action == 'add':
+ task_id = changes.get('context_id', {}).get('new')
+ user_id = changes.get('resource_id', {}).get('new')
+
+ elif action == 'remove':
+ task_id = changes.get('context_id', {}).get('old')
+ user_id = changes.get('resource_id', {}).get('old')
+
+ else:
+ # unknown action; nothing to resolve
+ return None, None
+
+ if not task_id:
+ return None, None
+
+ if not user_id:
+ return None, None
+
+ task = session.query('Task where id is "{}"'.format(task_id)).one()
+ user = session.query('User where id is "{}"'.format(user_id)).one()
+
+ return task, user
+
+ def _get_asset(self, task):
+ """
+ Get asset from task entity
+
+ :param task: Task entity
+ :type task: dict
+ :returns: Asset entity
+ :rtype: dict
+ """
+ parent = task['parent']
+ self.db_con.install()
+ self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
+
+ avalon_entity = None
+ parent_id = parent['custom_attributes'].get(self.ca_mongoid)
+ if parent_id:
+ parent_id = ObjectId(parent_id)
+ avalon_entity = self.db_con.find_one({
+ '_id': parent_id,
+ 'type': 'asset'
+ })
+
+ if not avalon_entity:
+ avalon_entity = self.db_con.find_one({
+ 'type': 'asset',
+ 'name': parent['name']
+ })
+
+ if not avalon_entity:
+ self.db_con.uninstall()
+ msg = 'Entity "{}" not found in avalon database'.format(
+ parent['name']
+ )
+ self.error(msg)
+ return {
+ 'success': False,
+ 'message': msg
+ }
+ self.db_con.uninstall()
+ return avalon_entity
+
+ def _get_hierarchy(self, asset):
+ """
+ Get hierarchy from Asset entity
+
+ :param asset: Asset entity
+ :type asset: dict
+ :returns: hierarchy string
+ :rtype: str
+ """
+ return asset['data']['hierarchy']
+
+ def _get_template_data(self, task):
+ """
+ Get data to fill template from task
+
+ .. seealso:: :mod:`pypeapp.Anatomy`
+
+ :param task: Task entity
+ :type task: dict
+ :returns: data for anatomy template
+ :rtype: dict
+ """
+ project_name = task['project']['full_name']
+ project_code = task['project']['name']
+ try:
+ root = os.environ['PYPE_STUDIO_PROJECTS_PATH']
+ except KeyError:
+ msg = 'Project ({}) root not set'.format(project_name)
+ self.log.error(msg)
+ return {
+ 'success': False,
+ 'message': msg
+ }
+
+ # fill in template data
+ asset = self._get_asset(task)
+ t_data = {
+ 'root': root,
+ 'project': {
+ 'name': project_name,
+ 'code': project_code
+ },
+ 'asset': asset['name'],
+ 'task': task['name'],
+ 'hierarchy': self._get_hierarchy(asset)
+ }
+
+ return t_data
+
+ def launch(self, session, event):
+ # load shell scripts presets
+ presets = config.get_presets()['ftrack']["user_assigment_event"]
+ if not presets:
+ return
+ for entity in event.get('data', {}).get('entities', []):
+ if entity.get('entity_type') != 'Appointment':
+ continue
+
+ task, user = self._get_task_and_user(session,
+ entity.get('action'),
+ entity.get('changes'))
+
+ if not task or not user:
+ self.log.error(
+ 'Task or User was not found.')
+ continue
+
+ data = self._get_template_data(task)
+ # format directories to pass to shell script
+ anatomy = Anatomy(data["project"]["name"])
+ # formatting work dir is easiest part as we can use whole path
+ work_dir = anatomy.format(data)['avalon']['work']
+ # we also need publish but not whole
+ publish = anatomy.format_all(data)['partial']['avalon']['publish']
+ # now find path to {asset}
+ # escape the asset name in case it contains regex metacharacters
+ m = re.search("(^.+?{})".format(re.escape(data['asset'])),
+ publish)
+
+ if not m:
+ msg = 'Cannot get part of publish path {}'.format(publish)
+ self.log.error(msg)
+ return {
+ 'success': False,
+ 'message': msg
+ }
+ publish_dir = m.group(1)
+
+ for script in presets.get(entity.get('action')):
+ self.log.info(
+ '[{}] : running script for user {}'.format(
+ entity.get('action'), user["username"]))
+ self._run_script(script, [user["username"],
+ work_dir, publish_dir])
+
+ return True
+
+
+def register(session, **kw):
+ """
+ Register plugin. Called when used as a plugin.
+ """
+ if not isinstance(session, ftrack_api.session.Session):
+ return
+
+ UserAssigmentEvent(session).register()
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index a9aac47228..6a57704fff 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -343,8 +343,6 @@ def reset_frame_range_handles():
"""Set frame range to current asset"""
root = nuke.root()
- fps = float(api.Session.get("AVALON_FPS", 25))
- root["fps"].setValue(fps)
name = api.Session["AVALON_ASSET"]
asset = io.find_one({"name": name, "type": "asset"})
@@ -356,7 +354,7 @@ def reset_frame_range_handles():
data = asset["data"]
missing_cols = []
- check_cols = ["fstart", "fend", "handle_start", "handle_end"]
+ check_cols = ["fps", "fstart", "fend", "handle_start", "handle_end"]
for col in check_cols:
if col not in data:
@@ -373,20 +371,29 @@ def reset_frame_range_handles():
handles = avalon.nuke.get_handles(asset)
handle_start, handle_end = pype.get_handle_irregular(asset)
- log.info("__ handles: `{}`".format(handles))
- log.info("__ handle_start: `{}`".format(handle_start))
- log.info("__ handle_end: `{}`".format(handle_end))
-
+ fps = asset["data"]["fps"]
edit_in = int(asset["data"]["fstart"]) - handle_start
edit_out = int(asset["data"]["fend"]) + handle_end
+ root["fps"].setValue(fps)
root["first_frame"].setValue(edit_in)
root["last_frame"].setValue(edit_out)
+ log.info("__ handles: `{}`".format(handles))
+ log.info("__ handle_start: `{}`".format(handle_start))
+ log.info("__ handle_end: `{}`".format(handle_end))
+ log.info("__ edit_in: `{}`".format(edit_in))
+ log.info("__ edit_out: `{}`".format(edit_out))
+ log.info("__ fps: `{}`".format(fps))
+
# setting active viewers
nuke.frame(int(asset["data"]["fstart"]))
- vv = nuke.activeViewer().node()
+ try:
+ vv = nuke.activeViewer().node()
+ except AttributeError:
+ log.error("No active viewer. Select any node and hit num `1`")
+ return
range = '{0}-{1}'.format(
int(asset["data"]["fstart"]),
diff --git a/pype/plugins/ftrack/publish/collect_ftrack_api.py b/pype/plugins/ftrack/publish/collect_ftrack_api.py
index e4923cac98..d09baec676 100644
--- a/pype/plugins/ftrack/publish/collect_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/collect_ftrack_api.py
@@ -18,6 +18,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
ftrack_log = logging.getLogger('ftrack_api')
ftrack_log.setLevel(logging.WARNING)
+ ftrack_log = logging.getLogger('ftrack_api_old')
+ ftrack_log.setLevel(logging.WARNING)
+
# Collect session
session = ftrack_api.Session()
context.data["ftrackSession"] = session
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
index 9be0210c4c..02455454bb 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
@@ -49,14 +49,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
- if comp.get('thumbnail'):
+ if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
location = self.get_ftrack_location(
'ftrack.server', ft_session
)
component_data = {
"name": "thumbnail" # Default component name is "main".
}
- elif comp.get('preview'):
+ comp['thumbnail'] = True
+ elif comp.get('preview') or ("preview" in comp.get('tags', [])):
'''
Ftrack bug requirement:
- Start frame must be 0
@@ -120,7 +121,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList.append(component_item)
# Create copy with ftrack.unmanaged location if thumb or prev
- if comp.get('thumbnail') or comp.get('preview'):
+ if comp.get('thumbnail') or comp.get('preview') \
+ or ("preview" in comp.get('tags', [])) \
+ or ("thumbnail" in comp.get('tags', [])):
unmanaged_loc = self.get_ftrack_location(
'ftrack.unmanaged', ft_session
)
@@ -148,7 +151,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList.append(component_item_src)
-
self.log.debug('componentsList: {}'.format(str(componentList)))
instance.data["ftrackComponentsList"] = componentList
diff --git a/pype/plugins/global/_publish_unused/extract_quicktime.py b/pype/plugins/global/_publish_unused/extract_quicktime.py
new file mode 100644
index 0000000000..6a33d825d0
--- /dev/null
+++ b/pype/plugins/global/_publish_unused/extract_quicktime.py
@@ -0,0 +1,86 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
+ """Resolve any dependency issies
+
+ This plug-in resolves any paths which, if not updated might break
+ the published file.
+
+ The order of families is important, when working with lookdev you want to
+ first publish the texture, update the texture paths in the nodes and then
+ publish the shading network. Same goes for file dependent assets.
+ """
+
+ label = "Extract Quicktime"
+ order = pyblish.api.ExtractorOrder
+ families = ["imagesequence", "render", "write", "source"]
+ hosts = ["shell"]
+
+ def process(self, instance):
+ # fps = instance.data.get("fps")
+ # start = instance.data.get("startFrame")
+ # stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+ #
+ # collected_frames = os.listdir(stagingdir)
+ # collections, remainder = clique.assemble(collected_frames)
+ #
+ # full_input_path = os.path.join(
+ # stagingdir, collections[0].format('{head}{padding}{tail}')
+ # )
+ # self.log.info("input {}".format(full_input_path))
+ #
+ # filename = collections[0].format('{head}')
+ # if not filename.endswith('.'):
+ # filename += "."
+ # movFile = filename + "mov"
+ # full_output_path = os.path.join(stagingdir, movFile)
+ #
+ # self.log.info("output {}".format(full_output_path))
+ #
+ # config_data = instance.context.data['output_repre_config']
+ #
+ # proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+ # profile = config_data.get(proj_name, config_data['__default__'])
+ #
+ # input_args = []
+ # # overrides output file
+ # input_args.append("-y")
+ # # preset's input data
+ # input_args.extend(profile.get('input', []))
+ # # necessary input data
+ # input_args.append("-start_number {}".format(start))
+ # input_args.append("-i {}".format(full_input_path))
+ # input_args.append("-framerate {}".format(fps))
+ #
+ # output_args = []
+ # # preset's output data
+ # output_args.extend(profile.get('output', []))
+ # # output filename
+ # output_args.append(full_output_path)
+ # mov_args = [
+ # "ffmpeg",
+ # " ".join(input_args),
+ # " ".join(output_args)
+ # ]
+ # subprocess_mov = " ".join(mov_args)
+ # sub_proc = subprocess.Popen(subprocess_mov)
+ # sub_proc.wait()
+ #
+ # if not os.path.isfile(full_output_path):
+ # raise("Quicktime wasn't created succesfully")
+ #
+ # if "representations" not in instance.data:
+ # instance.data["representations"] = []
+ #
+ # representation = {
+ # 'name': 'mov',
+ # 'ext': 'mov',
+ # 'files': movFile,
+ # "stagingDir": stagingdir,
+ # "preview": True
+ # }
+ # instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/_publish_unused/extract_review.py b/pype/plugins/global/_publish_unused/extract_review.py
deleted file mode 100644
index 885db1cfc9..0000000000
--- a/pype/plugins/global/_publish_unused/extract_review.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# import os
-# import pyblish.api
-# import subprocess
-# from pype.vendor import clique
-# from pypeapp import config
-#
-#
-# class ExtractReview(pyblish.api.InstancePlugin):
-# """Resolve any dependency issies
-#
-# This plug-in resolves any paths which, if not updated might break
-# the published file.
-#
-# The order of families is important, when working with lookdev you want to
-# first publish the texture, update the texture paths in the nodes and then
-# publish the shading network. Same goes for file dependent assets.
-# """
-#
-# label = "Extract Review"
-# order = pyblish.api.ExtractorOrder
-# # families = ["imagesequence", "render", "write", "source"]
-# # hosts = ["shell"]
-#
-# def process(self, instance):
-# # adding plugin attributes from presets
-# publish_presets = config.get_presets()["plugins"]["global"]["publish"]
-# plugin_attrs = publish_presets[self.__class__.__name__]
-#
-#
-# fps = instance.data.get("fps")
-# start = instance.data.get("startFrame")
-# stagingdir = os.path.normpath(instance.data.get("stagingDir"))
-#
-# collected_frames = os.listdir(stagingdir)
-# collections, remainder = clique.assemble(collected_frames)
-#
-# full_input_path = os.path.join(
-# stagingdir, collections[0].format('{head}{padding}{tail}')
-# )
-# self.log.info("input {}".format(full_input_path))
-#
-# filename = collections[0].format('{head}')
-# if not filename.endswith('.'):
-# filename += "."
-# movFile = filename + "mov"
-# full_output_path = os.path.join(stagingdir, movFile)
-#
-# self.log.info("output {}".format(full_output_path))
-#
-# config_data = instance.context.data['output_repre_config']
-#
-# proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-# profile = config_data.get(proj_name, config_data['__default__'])
-#
-# input_args = []
-# # overrides output file
-# input_args.append("-y")
-# # preset's input data
-# input_args.extend(profile.get('input', []))
-# # necessary input data
-# input_args.append("-start_number {}".format(start))
-# input_args.append("-i {}".format(full_input_path))
-# input_args.append("-framerate {}".format(fps))
-#
-# output_args = []
-# # preset's output data
-# output_args.extend(profile.get('output', []))
-# # output filename
-# output_args.append(full_output_path)
-# mov_args = [
-# "ffmpeg",
-# " ".join(input_args),
-# " ".join(output_args)
-# ]
-# subprocess_mov = " ".join(mov_args)
-# sub_proc = subprocess.Popen(subprocess_mov)
-# sub_proc.wait()
-#
-# if not os.path.isfile(full_output_path):
-# raise("Quicktime wasn't created succesfully")
-#
-# if "representations" not in instance.data:
-# instance.data["representations"] = []
-#
-# representation = {
-# 'name': 'mov',
-# 'ext': 'mov',
-# 'files': movFile,
-# "stagingDir": stagingdir,
-# "preview": True
-# }
-# instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index ad128c099b..ed48404a98 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -6,6 +6,7 @@ from pprint import pformat
import pyblish.api
from avalon import api
+import pype.api as pype
def collect(root,
@@ -64,7 +65,7 @@ def collect(root,
return collections
-class CollectFileSequences(pyblish.api.ContextPlugin):
+class CollectRenderedFrames(pyblish.api.ContextPlugin):
"""Gather file sequences from working directory
When "FILESEQUENCE" environment variable is set these paths (folders or
@@ -87,7 +88,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
targets = ["filesequence"]
- label = "File Sequences"
+ label = "RenderedFrames"
def process(self, context):
if os.environ.get("PYPE_PUBLISH_PATHS"):
@@ -128,6 +129,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
self.log.info("setting session using metadata")
api.Session.update(session)
os.environ.update(session)
+
else:
# Search in directory
data = dict()
@@ -161,6 +163,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
assert isinstance(families, (list, tuple)), "Must be iterable"
assert families, "Must have at least a single family"
families.append("ftrack")
+ families.append("review")
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
@@ -205,7 +208,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
'files': list(collection),
"stagingDir": root,
"anatomy_template": "render",
- "frameRate": fps
+ "frameRate": fps,
+ "tags": ['review']
}
instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 34ee33f602..5f16cc91f2 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -2,6 +2,7 @@ import os
import subprocess
import pype.api
import json
+import pyblish
class ExtractBurnin(pype.api.Extractor):
@@ -14,7 +15,8 @@ class ExtractBurnin(pype.api.Extractor):
"""
label = "Quicktime with burnins"
- families = ["burnin"]
+ order = pyblish.api.ExtractorOrder + 0.03
+ families = ["review", "burnin"]
optional = True
def process(self, instance):
@@ -22,42 +24,68 @@ class ExtractBurnin(pype.api.Extractor):
raise RuntimeError("Burnin needs already created mov to work on.")
# TODO: expand burnin data list to include all usefull keys
- burnin_data = {
+ version = ''
+ if instance.context.data.get('version'):
+ version = "v" + str(instance.context.data['version'])
+
+ prep_data = {
"username": instance.context.data['user'],
"asset": os.environ['AVALON_ASSET'],
"task": os.environ['AVALON_TASK'],
"start_frame": int(instance.data['startFrame']),
- "version": "v" + str(instance.context.data['version'])
+ "version": version
}
+ self.log.debug("__ prep_data: {}".format(prep_data))
+ for i, repre in enumerate(instance.data["representations"]):
+ self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
- for repre in instance.data["representations"]:
- if (not repre.get("burnin", False) or
- "burnin" not in repre.get("tags", [])):
+ if "burnin" not in repre.get("tags", []):
continue
- stagingdir = self.staging_dir(instance)
+ stagingdir = repre["stagingDir"]
filename = "{0}".format(repre["files"])
- movieFileBurnin = filename + "Burn" + ".mov"
+ name = "_burnin"
+ movieFileBurnin = filename.replace(".mov", "") + name + ".mov"
- full_movie_path = os.path.join(stagingdir, repre["files"])
- full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
+ full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"])
+ full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin)
+ self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"output": full_burnin_path.replace("\\", "/"),
- "burnin_data": burnin_data
- }
+ "burnin_data": prep_data
+ }
+
+ self.log.debug("__ burnin_data2: {}".format(burnin_data))
json_data = json.dumps(burnin_data)
- scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
+ scriptpath = os.path.normpath(os.path.join(os.environ['PYPE_MODULE_ROOT'],
"pype",
"scripts",
- "otio_burnin.py")
+ "otio_burnin.py"))
- p = subprocess.Popen(
- ['python', scriptpath, json_data]
- )
- p.wait()
+ self.log.debug("__ scriptpath: {}".format(scriptpath))
+ self.log.debug("__ EXE: {}".format(os.getenv("PYPE_PYTHON_EXE")))
- repre['files']: movieFileBurnin
+ try:
+ p = subprocess.Popen(
+ [os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data]
+ )
+ p.wait()
+ if not os.path.isfile(full_burnin_path):
+ raise RuntimeError("File not existing: {}".format(full_burnin_path))
+ except Exception as e:
+ raise RuntimeError("Burnin script didn't work: `{}`".format(e))
+
+ if os.path.exists(full_burnin_path):
+ repre_update = {
+ "files": movieFileBurnin,
+ "name": repre["name"]
+ }
+ instance.data["representations"][i].update(repre_update)
+
+ # removing the source mov file
+ os.remove(full_movie_path)
+ self.log.debug("Removed: `{}`".format(full_movie_path))
diff --git a/pype/plugins/global/publish/extract_quicktime.py b/pype/plugins/global/publish/extract_quicktime.py
deleted file mode 100644
index b6ccf38385..0000000000
--- a/pype/plugins/global/publish/extract_quicktime.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import pyblish.api
-import subprocess
-from pype.vendor import clique
-
-
-class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
- """Resolve any dependency issies
-
- This plug-in resolves any paths which, if not updated might break
- the published file.
-
- The order of families is important, when working with lookdev you want to
- first publish the texture, update the texture paths in the nodes and then
- publish the shading network. Same goes for file dependent assets.
- """
-
- label = "Extract Quicktime"
- order = pyblish.api.ExtractorOrder
- families = ["imagesequence", "render", "write", "source"]
- hosts = ["shell"]
-
- def process(self, instance):
- fps = instance.data.get("fps")
- start = instance.data.get("startFrame")
- stagingdir = os.path.normpath(instance.data.get("stagingDir"))
-
- collected_frames = os.listdir(stagingdir)
- collections, remainder = clique.assemble(collected_frames)
-
- full_input_path = os.path.join(
- stagingdir, collections[0].format('{head}{padding}{tail}')
- )
- self.log.info("input {}".format(full_input_path))
-
- filename = collections[0].format('{head}')
- if not filename.endswith('.'):
- filename += "."
- movFile = filename + "mov"
- full_output_path = os.path.join(stagingdir, movFile)
-
- self.log.info("output {}".format(full_output_path))
-
- config_data = instance.context.data['output_repre_config']
-
- proj_name = os.environ.get('AVALON_PROJECT', '__default__')
- profile = config_data.get(proj_name, config_data['__default__'])
-
- input_args = []
- # overrides output file
- input_args.append("-y")
- # preset's input data
- input_args.extend(profile.get('input', []))
- # necessary input data
- input_args.append("-start_number {}".format(start))
- input_args.append("-i {}".format(full_input_path))
- input_args.append("-framerate {}".format(fps))
-
- output_args = []
- # preset's output data
- output_args.extend(profile.get('output', []))
- # output filename
- output_args.append(full_output_path)
- mov_args = [
- "ffmpeg",
- " ".join(input_args),
- " ".join(output_args)
- ]
- subprocess_mov = " ".join(mov_args)
- sub_proc = subprocess.Popen(subprocess_mov)
- sub_proc.wait()
-
- if not os.path.isfile(full_output_path):
- raise("Quicktime wasn't created succesfully")
-
- if "representations" not in instance.data:
- instance.data["representations"] = []
-
- representation = {
- 'name': 'mov',
- 'ext': 'mov',
- 'files': movFile,
- "stagingDir": stagingdir,
- "preview": True
- }
- instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
new file mode 100644
index 0000000000..3a764b19c3
--- /dev/null
+++ b/pype/plugins/global/publish/extract_review.py
@@ -0,0 +1,174 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+from pypeapp import config
+
+
+class ExtractReview(pyblish.api.InstancePlugin):
+ """Extracting Review mov file for Ftrack
+
+ Compulsory attribute of representation is tags list with "review",
+ otherwise the representation is ignored.
+
+ All new representations are created and encoded by ffmpeg following
+ the presets found in
+ `pype-config/presets/plugins/global/publish.json:ExtractReview:outputs`.
+ To change the file extension filter, use the preset attribute
+ `ext_filter`.
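+
+ A minimal, illustrative preset (key names match what this plugin
+ reads; the concrete values are hypothetical)::
+
+ "ExtractReview": {
+ "ext_filter": ["mov", "exr", "dpx"],
+ "outputs": {
+ "h264": {
+ "ext": "mov",
+ "families": ["review"],
+ "input": ["-apply_trc gamma22"],
+ "output": ["-pix_fmt yuv420p", "-crf 18"],
+ "tags": ["review"]
+ }
+ }
+ }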
+ """
+
+ label = "Extract Review"
+ order = pyblish.api.ExtractorOrder + 0.02
+ families = ["review"]
+
+ def process(self, instance):
+ # adding plugin attributes from presets
+ publish_presets = config.get_presets()["plugins"]["global"]["publish"]
+ plugin_attrs = publish_presets[self.__class__.__name__]
+ output_profiles = plugin_attrs.get("outputs", {})
+
+ inst_data = instance.data
+ fps = inst_data.get("fps")
+ start_frame = inst_data.get("startFrame")
+
+ self.log.debug("Families In: `{}`".format(instance.data["families"]))
+
+ # get representation and loop them
+ representations = instance.data["representations"]
+
+ # filter out mov and img sequences
+ representations_new = representations[:]
+ for repre in representations:
+ if repre['ext'] in plugin_attrs["ext_filter"]:
+ tags = repre.get("tags", [])
+
+ self.log.info("Try repre: {}".format(repre))
+
+ if "review" in tags:
+ staging_dir = repre["stagingDir"]
+ for name, profile in output_profiles.items():
+ self.log.debug("Profile name: {}".format(name))
+
+ ext = profile.get("ext", None)
+ if not ext:
+ ext = "mov"
+ self.log.warning(
+ "`ext` attribute not in output profile. Setting to default ext: `mov`")
+
+ self.log.debug("instance.families: {}".format(instance.data['families']))
+ self.log.debug("profile.families: {}".format(profile['families']))
+
+ if any(item in instance.data['families'] for item in profile['families']):
+ if isinstance(repre["files"], list):
+ collections, remainder = clique.assemble(
+ repre["files"])
+
+ full_input_path = os.path.join(
+ staging_dir, collections[0].format(
+ '{head}{padding}{tail}')
+ )
+
+ filename = collections[0].format('{head}')
+ if filename.endswith('.'):
+ filename = filename[:-1]
+ else:
+ full_input_path = os.path.join(
+ staging_dir, repre["files"])
+ filename = repre["files"].split(".")[0]
+
+ repr_file = filename + "_{0}.{1}".format(name, ext)
+
+ full_output_path = os.path.join(
+ staging_dir, repr_file)
+
+ self.log.info("input {}".format(full_input_path))
+ self.log.info("output {}".format(full_output_path))
+
+ repre_new = repre.copy()
+
+ new_tags = tags[:]
+ p_tags = profile.get('tags', [])
+ self.log.info("p_tags: `{}`".format(p_tags))
+ # add profile tags to the instance families and to the new
+ # representation tags
+ for tag in p_tags:
+ if tag not in instance.data["families"]:
+ instance.data["families"].append(tag)
+ if tag not in new_tags:
+ new_tags.append(tag)
+
+ self.log.info("new_tags: `{}`".format(new_tags))
+
+ input_args = []
+
+ # overrides output file
+ input_args.append("-y")
+
+ # preset's input data
+ input_args.extend(profile.get('input', []))
+
+ # necessary input data
+ # adds start arg only if image sequence
+ if "mov" not in repre_new['ext']:
+ input_args.append("-start_number {0} -framerate {1}".format(
+ start_frame, fps))
+
+ input_args.append("-i {}".format(full_input_path))
+
+ output_args = []
+ # preset's output data
+ output_args.extend(profile.get('output', []))
+
+ # letter_box
+ # TODO: add to documentation
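+ # `letter_box` is read as the target aspect ratio (width/height);
+ # the drawbox filters below paint black bars on the top and bottom.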
+ lb = profile.get('letter_box', None)
+ if lb:
+ output_args.append((
+ "-filter:v "
+ "drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):"
+ "t=fill:c=black,"
+ "drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:"
+ "round((ih-(iw*(1/{0})))/2):t=fill:c=black"
+ ).format(lb))
+
+ # output filename
+ output_args.append(full_output_path)
+ mov_args = [
+ "ffmpeg",
+ " ".join(input_args),
+ " ".join(output_args)
+ ]
+ subprcs_cmd = " ".join(mov_args)
+
+ # run subprocess
+ self.log.debug("{}".format(subprcs_cmd))
+ sub_proc = subprocess.Popen(subprcs_cmd)
+ sub_proc.wait()
+
+ if not os.path.isfile(full_output_path):
+ raise RuntimeError(
+ "Quicktime wasn't created successfully")
+
+ # create representation data
+ repre_new.update({
+ 'name': name,
+ 'ext': ext,
+ 'files': repr_file,
+ "tags": new_tags,
+ "outputName": name
+ })
+ if repre_new.get('preview'):
+ repre_new.pop("preview")
+ if repre_new.get('thumbnail'):
+ repre_new.pop("thumbnail")
+
+ # adding representation
+ representations_new.append(repre_new)
+ # if "delete" in tags:
+ # if "mov" in full_input_path:
+ # os.remove(full_input_path)
+ # self.log.debug("Removed: `{}`".format(full_input_path))
+ else:
+ continue
+ else:
+ continue
+
+ self.log.debug(
+ "new representations: {}".format(representations_new))
+ instance.data["representations"] = representations_new
+
+ self.log.debug("Families Out: `{}`".format(instance.data["families"]))
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index e70657eef9..e758789c37 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -99,18 +99,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# \ /
# o __/
#
- for result in context.data["results"]:
- if not result["success"]:
- self.log.debug(result)
- exc_type, exc_value, exc_traceback = result["error_info"]
- extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
- self.log.debug(
- "Error at line {}: \"{}\"".format(
- extracted_traceback[1], result["error"]
- )
- )
- assert all(result["success"] for result in context.data["results"]), (
- "Atomicity not held, aborting.")
+ # for result in context.data["results"]:
+ # if not result["success"]:
+ # self.log.debug(result)
+ # exc_type, exc_value, exc_traceback = result["error_info"]
+ # extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
+ # self.log.debug(
+ # "Error at line {}: \"{}\"".format(
+ # extracted_traceback[1], result["error"]
+ # )
+ # )
+ # assert all(result["success"] for result in context.data["results"]), (
+ # "Atomicity not held, aborting.")
# Assemble
#
@@ -225,17 +225,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents)
- template_data = {"root": root,
- "project": {"name": PROJECT,
- "code": project['data']['code']},
- "silo": asset['silo'],
- "task": TASK,
- "asset": ASSET,
- "family": instance.data['family'],
- "subset": subset["name"],
- "version": int(version["name"]),
- "hierarchy": hierarchy}
-
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
@@ -257,6 +246,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# | ||
# |_______|
#
+ # create template data for Anatomy
+ template_data = {"root": root,
+ "project": {"name": PROJECT,
+ "code": project['data']['code']},
+ "silo": asset['silo'],
+ "task": TASK,
+ "asset": ASSET,
+ "family": instance.data['family'],
+ "subset": subset["name"],
+ "version": int(version["name"]),
+ "hierarchy": hierarchy}
files = repre['files']
if repre.get('stagingDir'):
@@ -286,8 +286,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
os.path.normpath(
anatomy_filled[template_name]["path"])
)
- self.log.debug(
- "test_dest_files: {}".format(str(test_dest_files)))
+
+ self.log.debug(
+ "test_dest_files: {}".format(str(test_dest_files)))
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
@@ -342,6 +343,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template_data["representation"] = repre['ext']
+ if repre.get("outputName"):
+ template_data["output"] = repre['outputName']
+
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = os.path.normpath(
diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index 992553cc7e..057fd2362c 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -276,7 +276,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"families": ["render"],
"source": source,
"user": context.data["user"],
-
+ "version": context.data["version"],
# Optional metadata (for debugging)
"metadata": {
"instance": data,
diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py
deleted file mode 100644
index 51caaf6adc..0000000000
--- a/pype/plugins/maya/load/load_alembic.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import pype.maya.plugin
-import os
-from pypeapp import config
-
-
-class AbcLoader(pype.maya.plugin.ReferenceLoader):
- """Specific loader of Alembic for the pype.animation family"""
-
- families = ["animation",
- "pointcache"]
- label = "Reference animation"
- representations = ["abc"]
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
-
- try:
- family = context["representation"]["context"]["family"]
- except ValueError:
- family = "animation"
-
- groupName = "{}:{}".format(namespace, name)
- cmds.loadPlugin("AbcImport.mll", quiet=True)
- nodes = cmds.file(self.fname,
- namespace=namespace,
- sharedReferenceFile=False,
- groupReference=True,
- groupName=groupName,
- reference=True,
- returnNewNodes=True)
-
- nodes.pop(0)
- roots = set()
- for node in nodes:
- try:
- roots.add(cmds.ls(node, long=True)[0].split('|')[2])
- except:
- pass
- cmds.parent(roots, world=True)
- cmds.makeIdentity(groupName, apply=False, rotate=True,
- translate=True, scale=True)
- cmds.parent(roots, groupName)
-
- nodes.append(groupName)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
- c = colors.get(family)
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- self[:] = nodes
-
- return nodes
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_gpucache.py b/pype/plugins/maya/load/load_gpucache.py
new file mode 100644
index 0000000000..b98ca8b7f4
--- /dev/null
+++ b/pype/plugins/maya/load/load_gpucache.py
@@ -0,0 +1,105 @@
+from avalon import api
+import pype.maya.plugin
+import os
+from pypeapp import config
+reload(config)
+
+
+class GpuCacheLoader(api.Loader):
+ """Load model Alembic as gpuCache"""
+
+ families = ["model"]
+ representations = ["abc"]
+
+ label = "Import Gpu Cache"
+ order = -5
+ icon = "code-fork"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ import avalon.maya.lib as lib
+ from avalon.maya.pipeline import containerise
+
+ asset = context['asset']['name']
+ namespace = namespace or lib.unique_namespace(
+ asset + "_",
+ prefix="_" if asset[0].isdigit() else "",
+ suffix="_",
+ )
+
+ cmds.loadPlugin("gpuCache", quiet=True)
+
+ # Root group
+ label = "{}:{}".format(namespace, name)
+ root = cmds.group(name=label, empty=True)
+
+ presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
+ colors = presets['plugins']['maya']['load']['colors']
+ c = colors.get('model')
+ if c is not None:
+ cmds.setAttr(root + ".useOutlinerColor", 1)
+ cmds.setAttr(root + ".outlinerColor",
+ c[0], c[1], c[2])
+
+ # Create transform with shape
+ transform_name = label + "_GPU"
+ transform = cmds.createNode("transform", name=transform_name,
+ parent=root)
+ cache = cmds.createNode("gpuCache",
+ parent=transform,
+ name="{0}Shape".format(transform_name))
+
+ # Set the cache filepath
+ cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
+ cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
+
+ # Lock parenting of the transform and cache
+ cmds.lockNode([transform, cache], lock=True)
+
+ nodes = [root, transform, cache]
+ self[:] = nodes
+
+ return containerise(
+ name=name,
+ namespace=namespace,
+ nodes=nodes,
+ context=context,
+ loader=self.__class__.__name__)
+
+ def update(self, container, representation):
+
+ import maya.cmds as cmds
+
+ path = api.get_representation_path(representation)
+
+ # Update the cache
+ members = cmds.sets(container['objectName'], query=True)
+ caches = cmds.ls(members, type="gpuCache", long=True)
+
+        assert len(caches) == 1, "Expected exactly one gpuCache in container"
+
+ for cache in caches:
+ cmds.setAttr(cache + ".cacheFileName", path, type="string")
+
+ cmds.setAttr(container["objectName"] + ".representation",
+ str(representation["_id"]),
+ type="string")
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def remove(self, container):
+ import maya.cmds as cmds
+ members = cmds.sets(container['objectName'], query=True)
+ cmds.lockNode(members, lock=False)
+ cmds.delete([container['objectName']] + members)
+
+ # Clean up the namespace
+ try:
+ cmds.namespace(removeNamespace=container['namespace'],
+ deleteNamespaceContent=True)
+ except RuntimeError:
+ pass
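
The loader above keeps the gpuCache shape and its transform locked, so `update` and `switch` only retarget `cacheFileName` instead of rebuilding nodes. A sketch of invoking it through avalon, assuming avalon-core's `api.load(Loader, representation)` entry point; the representation value is a placeholder for a real database document or id:

    from avalon import api

    # "..." stands in for a real representation document/id
    container = api.load(GpuCacheLoader, representation="...")
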
diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py
deleted file mode 100644
index 478f2e59aa..0000000000
--- a/pype/plugins/maya/load/load_model.py
+++ /dev/null
@@ -1,219 +0,0 @@
-from avalon import api
-import pype.maya.plugin
-import os
-from pypeapp import config
-reload(config)
-
-
-class ModelLoader(pype.maya.plugin.ReferenceLoader):
- """Load the model"""
-
- families = ["model"]
- representations = ["ma"]
- tool_names = ["loader"]
-
- label = "Reference Model"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
- from avalon import maya
-
- with maya.maintained_selection():
-
- groupName = "{}:{}".format(namespace, name)
- nodes = cmds.file(self.fname,
- namespace=namespace,
- groupReference=True,
- groupName=groupName,
- reference=True,
- returnNewNodes=True)
-
- nodes.pop(0)
- roots = set()
- for node in nodes:
- try:
- roots.add(cmds.ls(node, long=True)[0].split('|')[2])
- except:
- pass
- cmds.parent(roots, world=True)
- cmds.makeIdentity(groupName, apply=False, rotate=True,
- translate=True, scale=True)
- cmds.parent(roots, groupName)
-
- nodes.append(groupName)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
- c = colors.get('model')
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- self[:] = nodes
-
- return nodes
-
- def switch(self, container, representation):
- self.update(container, representation)
-
-
-class GpuCacheLoader(api.Loader):
- """Load model Alembic as gpuCache"""
-
- families = ["model"]
- representations = ["abc"]
-
- label = "Import Gpu Cache"
- order = -5
- icon = "code-fork"
- color = "orange"
-
- def load(self, context, name, namespace, data):
-
- import maya.cmds as cmds
- import avalon.maya.lib as lib
- from avalon.maya.pipeline import containerise
-
- asset = context['asset']['name']
- namespace = namespace or lib.unique_namespace(
- asset + "_",
- prefix="_" if asset[0].isdigit() else "",
- suffix="_",
- )
-
- cmds.loadPlugin("gpuCache", quiet=True)
-
- # Root group
- label = "{}:{}".format(namespace, name)
- root = cmds.group(name=label, empty=True)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
- c = colors.get('model')
- if c is not None:
- cmds.setAttr(root + ".useOutlinerColor", 1)
- cmds.setAttr(root + ".outlinerColor",
- c[0], c[1], c[2])
-
- # Create transform with shape
- transform_name = label + "_GPU"
- transform = cmds.createNode("transform", name=transform_name,
- parent=root)
- cache = cmds.createNode("gpuCache",
- parent=transform,
- name="{0}Shape".format(transform_name))
-
- # Set the cache filepath
- cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
- cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
-
- # Lock parenting of the transform and cache
- cmds.lockNode([transform, cache], lock=True)
-
- nodes = [root, transform, cache]
- self[:] = nodes
-
- return containerise(
- name=name,
- namespace=namespace,
- nodes=nodes,
- context=context,
- loader=self.__class__.__name__)
-
- def update(self, container, representation):
-
- import maya.cmds as cmds
-
- path = api.get_representation_path(representation)
-
- # Update the cache
- members = cmds.sets(container['objectName'], query=True)
- caches = cmds.ls(members, type="gpuCache", long=True)
-
- assert len(caches) == 1, "This is a bug"
-
- for cache in caches:
- cmds.setAttr(cache + ".cacheFileName", path, type="string")
-
- cmds.setAttr(container["objectName"] + ".representation",
- str(representation["_id"]),
- type="string")
-
- def switch(self, container, representation):
- self.update(container, representation)
-
- def remove(self, container):
- import maya.cmds as cmds
- members = cmds.sets(container['objectName'], query=True)
- cmds.lockNode(members, lock=False)
- cmds.delete([container['objectName']] + members)
-
- # Clean up the namespace
- try:
- cmds.namespace(removeNamespace=container['namespace'],
- deleteNamespaceContent=True)
- except RuntimeError:
- pass
-
-
-class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
- """Specific loader of Alembic for the studio.animation family"""
-
- families = ["model"]
- representations = ["abc"]
- tool_names = ["loader"]
-
- label = "Reference Model"
- order = -10
- icon = "code-fork"
- color = "orange"
-
- def process_reference(self, context, name, namespace, data):
-
- import maya.cmds as cmds
-
- groupName = "{}:{}".format(namespace, name)
- cmds.loadPlugin("AbcImport.mll", quiet=True)
- nodes = cmds.file(self.fname,
- namespace=namespace,
- sharedReferenceFile=False,
- groupReference=True,
- groupName="{}:{}".format(namespace, name),
- reference=True,
- returnNewNodes=True)
-
- namespace = cmds.referenceQuery(nodes[0], namespace=True)
-
- nodes.pop(0)
- roots = set()
- for node in nodes:
- try:
- roots.add(cmds.ls(node, long=True)[0].split('|')[2])
- except:
- pass
- cmds.parent(roots, world=True)
- cmds.makeIdentity(groupName, apply=False, rotate=True,
- translate=True, scale=True)
- cmds.parent(roots, groupName)
-
- nodes.append(groupName)
-
- presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
- colors = presets['plugins']['maya']['load']['colors']
- c = colors.get('model')
- if c is not None:
- cmds.setAttr(groupName + ".useOutlinerColor", 1)
- cmds.setAttr(groupName + ".outlinerColor",
- c[0], c[1], c[2])
-
- self[:] = nodes
-
- return roots
-
- def switch(self, container, representation):
- self.update(container, representation)
diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py
new file mode 100644
index 0000000000..199d79c941
--- /dev/null
+++ b/pype/plugins/maya/load/load_reference.py
@@ -0,0 +1,85 @@
+from avalon import api
+import pype.maya.plugin
+import os
+from pypeapp import config
+import pymel.core as pm
+reload(config)
+
+
+class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
+ """Load the model"""
+
+ families = ["model", "pointcache", "animation"]
+ representations = ["ma", "abc"]
+ tool_names = ["loader"]
+
+ label = "Reference"
+ order = -10
+ icon = "code-fork"
+ color = "orange"
+
+ def process_reference(self, context, name, namespace, data):
+
+ import maya.cmds as cmds
+ from avalon import maya
+
+ try:
+ family = context["representation"]["context"]["family"]
+        except KeyError:
+ family = "model"
+
+ with maya.maintained_selection():
+
+ groupName = "{}:{}".format(namespace, name)
+            cmds.loadPlugin("AbcImport", quiet=True)
+ nodes = cmds.file(self.fname,
+ namespace=namespace,
+ sharedReferenceFile=False,
+ groupReference=True,
+                              groupName=groupName,
+ reference=True,
+ returnNewNodes=True)
+
+ namespace = cmds.referenceQuery(nodes[0], namespace=True)
+
+ groupNode = pm.PyNode(groupName)
+ roots = set()
+
+ for node in nodes:
+ try:
+ roots.add(pm.PyNode(node).getAllParents()[-2])
+            except Exception:
+                # node has no second-level DAG parent
+                pass
+ for root in roots:
+ root.setParent(world=True)
+
+ groupNode.root().zeroTransformPivots()
+ for root in roots:
+ root.setParent(groupNode)
+
+ presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
+ colors = presets['plugins']['maya']['load']['colors']
+ c = colors.get(family)
+ if c is not None:
+ groupNode.useOutlinerColor.set(1)
+ groupNode.outlinerColor.set(c[0], c[1], c[2])
+
+ self[:] = nodes
+
+ return nodes
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+
+# for backwards compatibility
+class AbcLoader(ReferenceLoader):
+ families = ["pointcache", "animation"]
+ representations = ["abc"]
+ tool_names = []
+
+
+# for backwards compatibility
+class ModelLoader(ReferenceLoader):
+ families = ["model", "pointcache"]
+ representations = ["abc"]
+ tool_names = []
diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py
index 88c9ed7a47..c1e3c75021 100644
--- a/pype/plugins/maya/publish/collect_scene.py
+++ b/pype/plugins/maya/publish/collect_scene.py
@@ -42,7 +42,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
data['representations'] = [{
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': file,
"stagingDir": folder,
}]
diff --git a/pype/plugins/maya/publish/extract_animation.py b/pype/plugins/maya/publish/extract_animation.py
index 30d5dae92b..794a80e7a6 100644
--- a/pype/plugins/maya/publish/extract_animation.py
+++ b/pype/plugins/maya/publish/extract_animation.py
@@ -82,7 +82,7 @@ class ExtractAnimation(pype.api.Extractor):
representation = {
'name': 'abc',
- 'ext': '.abc',
+ 'ext': 'abc',
'files': filename,
"stagingDir": dirname,
}
diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py
index 0c7ef02b4b..1fed6c8dd7 100644
--- a/pype/plugins/maya/publish/extract_ass.py
+++ b/pype/plugins/maya/publish/extract_ass.py
@@ -42,7 +42,7 @@ class ExtractAssStandin(pype.api.Extractor):
representation = {
'name': 'ass',
- 'ext': '.ass',
+ 'ext': 'ass',
'files': filename,
"stagingDir": staging_dir
}
diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py
index bc807be9b0..34c3113e11 100644
--- a/pype/plugins/maya/publish/extract_assproxy.py
+++ b/pype/plugins/maya/publish/extract_assproxy.py
@@ -68,7 +68,7 @@ class ExtractAssProxy(pype.api.Extractor):
representation = {
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': filename,
"stagingDir": stagingdir
}
diff --git a/pype/plugins/maya/publish/extract_camera_alembic.py b/pype/plugins/maya/publish/extract_camera_alembic.py
index 01239fd1e8..77e055daa6 100644
--- a/pype/plugins/maya/publish/extract_camera_alembic.py
+++ b/pype/plugins/maya/publish/extract_camera_alembic.py
@@ -75,7 +75,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
representation = {
'name': 'abc',
- 'ext': '.abc',
+ 'ext': 'abc',
'files': filename,
"stagingDir": dir_path,
}
diff --git a/pype/plugins/maya/publish/extract_camera_mayaAscii.py b/pype/plugins/maya/publish/extract_camera_mayaAscii.py
index 152acb98fe..cafee6593d 100644
--- a/pype/plugins/maya/publish/extract_camera_mayaAscii.py
+++ b/pype/plugins/maya/publish/extract_camera_mayaAscii.py
@@ -173,7 +173,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
representation = {
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': filename,
"stagingDir": dir_path,
}
diff --git a/pype/plugins/maya/publish/extract_fbx.py b/pype/plugins/maya/publish/extract_fbx.py
index 93a99eea72..73d56f9a2c 100644
--- a/pype/plugins/maya/publish/extract_fbx.py
+++ b/pype/plugins/maya/publish/extract_fbx.py
@@ -213,7 +213,7 @@ class ExtractFBX(pype.api.Extractor):
representation = {
'name': 'mov',
- 'ext': '.mov',
+ 'ext': 'mov',
'files': filename,
"stagingDir": stagingDir,
}
diff --git a/pype/plugins/maya/publish/extract_maya_ascii_raw.py b/pype/plugins/maya/publish/extract_maya_ascii_raw.py
index c8f10d5b9b..895b6acbfe 100644
--- a/pype/plugins/maya/publish/extract_maya_ascii_raw.py
+++ b/pype/plugins/maya/publish/extract_maya_ascii_raw.py
@@ -56,7 +56,7 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):
representation = {
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': filename,
"stagingDir": dir_path
}
diff --git a/pype/plugins/maya/publish/extract_model.py b/pype/plugins/maya/publish/extract_model.py
index f6d9681222..d6e5e66c23 100644
--- a/pype/plugins/maya/publish/extract_model.py
+++ b/pype/plugins/maya/publish/extract_model.py
@@ -74,7 +74,7 @@ class ExtractModel(pype.api.Extractor):
representation = {
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': filename,
"stagingDir": stagingdir,
}
diff --git a/pype/plugins/maya/publish/extract_pointcache.py b/pype/plugins/maya/publish/extract_pointcache.py
index 907dfe0e18..0879a4bfe3 100644
--- a/pype/plugins/maya/publish/extract_pointcache.py
+++ b/pype/plugins/maya/publish/extract_pointcache.py
@@ -84,7 +84,7 @@ class ExtractAlembic(pype.api.Extractor):
representation = {
'name': 'abc',
- 'ext': '.abc',
+ 'ext': 'abc',
'files': filename,
"stagingDir": dirname
}
diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py
index ff08799c0a..87608af641 100644
--- a/pype/plugins/maya/publish/extract_quicktime.py
+++ b/pype/plugins/maya/publish/extract_quicktime.py
@@ -99,7 +99,6 @@ class ExtractQuicktime(pype.api.Extractor):
playblast = capture_gui.lib.capture_scene(preset)
self.log.info("file list {}".format(playblast))
- # self.log.info("Calculating HUD data overlay")
collected_frames = os.listdir(stagingdir)
collections, remainder = clique.assemble(collected_frames)
@@ -107,61 +106,19 @@ class ExtractQuicktime(pype.api.Extractor):
stagingdir, collections[0].format('{head}{padding}{tail}'))
self.log.info("input {}".format(input_path))
- movieFile = filename + ".mov"
- movieFileBurnin = filename + "Burn" + ".mov"
-
- full_movie_path = os.path.join(stagingdir, movieFile)
- full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
- self.log.info("output {}".format(full_movie_path))
- with avalon.maya.suspended_refresh():
- try:
- (
- ffmpeg
- .input(input_path, framerate=fps, start_number=int(start))
- .output(full_movie_path)
- .run(overwrite_output=True,
- capture_stdout=True,
- capture_stderr=True)
- )
- except ffmpeg.Error as e:
- ffmpeg_error = 'ffmpeg error: {}'.format(e.stderr)
- self.log.error(ffmpeg_error)
- raise RuntimeError(ffmpeg_error)
-
- version = instance.context.data['version']
-
- burnin_data = {
- "input": full_movie_path.replace("\\", "/"),
- "output": full_burnin_path.replace("\\", "/"),
- "burnin_data": {
- "username": instance.context.data['user'],
- "asset": os.environ['AVALON_ASSET'],
- "task": os.environ['AVALON_TASK'],
- "start_frame": int(start),
- "version": "v" + str(version)
- }
- }
-
- json_data = json.dumps(burnin_data)
- scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'], "pype", "scripts", "otio_burnin.py")
-
- p = subprocess.Popen(
- ['python', scriptpath, json_data]
- )
- p.wait()
-
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'mov',
'ext': 'mov',
- 'files': movieFileBurnin,
+ 'files': collected_frames,
"stagingDir": stagingdir,
'startFrame': start,
'endFrame': end,
'frameRate': fps,
- 'preview': True
+ 'preview': True,
+ 'tags': ['review']
}
instance.data["representations"].append(representation)
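
With the ffmpeg transcode and burnin subprocess removed, the extractor now publishes the raw playblast frames and marks them with the 'review' tag for downstream encoding. A quick sketch of reassembling such a frame list into a sequence pattern with clique, as the plugin already does for its input path:

    import clique

    frames = ["shot010.0001.png", "shot010.0002.png", "shot010.0003.png"]
    collections, remainder = clique.assemble(frames)
    print(collections[0].format("{head}{padding}{tail}"))
    # -> shot010.%04d.png
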
diff --git a/pype/plugins/maya/publish/extract_rendersetup.py b/pype/plugins/maya/publish/extract_rendersetup.py
index b8dbfc178e..c8d8db0bbb 100644
--- a/pype/plugins/maya/publish/extract_rendersetup.py
+++ b/pype/plugins/maya/publish/extract_rendersetup.py
@@ -30,7 +30,7 @@ class ExtractRenderSetup(pype.api.Extractor):
representation = {
'name': 'json',
- 'ext': '.json',
+ 'ext': 'json',
'files': json_filename,
"stagingDir": parent_dir,
}
diff --git a/pype/plugins/maya/publish/extract_rig.py b/pype/plugins/maya/publish/extract_rig.py
index 713d5e2b59..c98e562313 100644
--- a/pype/plugins/maya/publish/extract_rig.py
+++ b/pype/plugins/maya/publish/extract_rig.py
@@ -39,7 +39,7 @@ class ExtractRig(pype.api.Extractor):
representation = {
'name': 'ma',
- 'ext': '.ma',
+ 'ext': 'ma',
'files': filename,
"stagingDir": dir_path
}
diff --git a/pype/plugins/maya/publish/extract_thumbnail.py b/pype/plugins/maya/publish/extract_thumbnail.py
index 4bc1d91174..e47915c4cf 100644
--- a/pype/plugins/maya/publish/extract_thumbnail.py
+++ b/pype/plugins/maya/publish/extract_thumbnail.py
@@ -137,7 +137,7 @@ class ExtractThumbnail(pype.api.Extractor):
representation = {
'name': 'thumbnail',
- 'ext': '.jpg',
+ 'ext': 'jpg',
'files': thumbnail,
"stagingDir": stagingDir,
"thumbnail": True
diff --git a/pype/plugins/maya/publish/extract_vrayproxy.py b/pype/plugins/maya/publish/extract_vrayproxy.py
index b2c84db22b..dcaa910730 100644
--- a/pype/plugins/maya/publish/extract_vrayproxy.py
+++ b/pype/plugins/maya/publish/extract_vrayproxy.py
@@ -59,7 +59,7 @@ class ExtractVRayProxy(pype.api.Extractor):
representation = {
'name': 'vrmesh',
- 'ext': '.vrmesh',
+ 'ext': 'vrmesh',
'files': file_name,
"stagingDir": staging_dir,
}
diff --git a/pype/plugins/nuke/_publish_unused/collect_render_target.py b/pype/plugins/nuke/_publish_unused/collect_render_target.py
new file mode 100644
index 0000000000..6c04414f69
--- /dev/null
+++ b/pype/plugins/nuke/_publish_unused/collect_render_target.py
@@ -0,0 +1,46 @@
+import pyblish.api
+
+
+@pyblish.api.log
+class CollectRenderTarget(pyblish.api.InstancePlugin):
+ """Collect families for all instances"""
+
+ order = pyblish.api.CollectorOrder + 0.2
+ label = "Collect Render Target"
+ hosts = ["nuke", "nukeassist"]
+ families = ['write']
+
+ def process(self, instance):
+
+ node = instance[0]
+
+ self.log.info('processing {}'.format(node))
+
+ families = []
+ if instance.data.get('families'):
+ families += instance.data['families']
+
+ # set for ftrack to accept
+ # instance.data["families"] = ["ftrack"]
+
+ if node["render"].value():
+ # dealing with local/farm rendering
+ if node["render_farm"].value():
+ families.append("render.farm")
+ else:
+ families.append("render.local")
+ else:
+ families.append("render.frames")
+ # to ignore staging dir op in integrate
+ instance.data['transfer'] = False
+
+ families.append('ftrack')
+
+ instance.data["families"] = families
+
+ # Sort/grouped by family (preserving local index)
+ instance.context[:] = sorted(instance.context, key=self.sort_by_family)
+
+ def sort_by_family(self, instance):
+ """Sort by family"""
+ return instance.data.get("families", instance.data.get("family"))
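
Sorting the context by the `families` value groups instances of the same family together, and Python's stable sort preserves their original relative order within each group. The same key function over plain dicts:

    instances = [
        {"name": "b", "families": ["render.local"]},
        {"name": "a", "families": ["ftrack"]},
        {"name": "c", "families": ["render.local"]},
    ]
    ordered = sorted(
        instances, key=lambda i: i.get("families", i.get("family")))
    print([i["name"] for i in ordered])
    # -> ['a', 'b', 'c']  ('ftrack' sorts before 'render.local')
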
diff --git a/pype/plugins/nuke/publish/extract_script.py b/pype/plugins/nuke/_publish_unused/extract_script.py
similarity index 94%
rename from pype/plugins/nuke/publish/extract_script.py
rename to pype/plugins/nuke/_publish_unused/extract_script.py
index d0be98b93e..7d55ea0da4 100644
--- a/pype/plugins/nuke/publish/extract_script.py
+++ b/pype/plugins/nuke/_publish_unused/extract_script.py
@@ -27,8 +27,8 @@ class ExtractScript(pype.api.Extractor):
shutil.copy(current_script, path)
if "representations" not in instance.data:
- instance.data["representations"] = []
-
+ instance.data["representations"] = list()
+
representation = {
'name': 'nk',
'ext': '.nk',
diff --git a/pype/plugins/nuke/publish/collect_active_viewer.py b/pype/plugins/nuke/publish/collect_active_viewer.py
new file mode 100644
index 0000000000..3bcc1367f3
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_active_viewer.py
@@ -0,0 +1,14 @@
+import pyblish.api
+import nuke
+
+
+class CollectActiveViewer(pyblish.api.ContextPlugin):
+ """Collect any active viewer from nodes
+ """
+
+ order = pyblish.api.CollectorOrder + 0.3
+ label = "Collect Active Viewer"
+ hosts = ["nuke"]
+
+ def process(self, context):
+ context.data["ViewerProcess"] = nuke.ViewerProcess.node()
diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py
index 7f119f9a1e..cca5a861ff 100644
--- a/pype/plugins/nuke/publish/collect_instances.py
+++ b/pype/plugins/nuke/publish/collect_instances.py
@@ -64,11 +64,11 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"name": node.name(),
"subset": subset,
"family": avalon_knob_data["family"],
- "families": [family],
+ "families": [avalon_knob_data["family"], family],
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,
- "fps": int(nuke.root()['fps'].value())
+ "fps": nuke.root()['fps'].value()
})
diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py
index 2dae39a1fc..f98a3a0f7d 100644
--- a/pype/plugins/nuke/publish/collect_writes.py
+++ b/pype/plugins/nuke/publish/collect_writes.py
@@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
- families = ["render.local", "render", "render.farm"]
+ families = ["render", "render.local", "render.farm"]
def process(self, instance):
@@ -66,19 +66,20 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
instance.data['families'].append('ftrack')
if "representations" not in instance.data:
instance.data["representations"] = list()
- try:
- collected_frames = os.listdir(output_dir)
representation = {
'name': ext,
'ext': ext,
- 'files': collected_frames,
"stagingDir": output_dir,
"anatomy_template": "render"
}
- instance.data["representations"].append(representation)
+ try:
+ collected_frames = os.listdir(output_dir)
+ representation['files'] = collected_frames
+ instance.data["representations"].append(representation)
except Exception:
+ instance.data["representations"].append(representation)
self.log.debug("couldn't collect frames: {}".format(label))
if 'render.local' in instance.data['families']:
@@ -96,5 +97,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"colorspace": node["colorspace"].value(),
})
-
self.log.debug("instance.data: {}".format(instance.data))
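
The reordering in collect_writes registers the representation even when the output directory cannot be listed yet; only the `files` key is conditional, so later validators can still flag the missing frames. The same guarded-listing pattern in isolation, with assumed stand-ins for the plugin's locals:

    import os

    output_dir = "/tmp/renders/shot010"        # assumed render output dir
    instance_data = {"representations": []}    # stand-in for instance.data

    representation = {"name": "exr", "ext": "exr", "stagingDir": output_dir}
    try:
        representation["files"] = os.listdir(output_dir)
    except OSError:
        # nothing rendered yet -- keep the metadata, omit the file list
        pass
    instance_data["representations"].append(representation)
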
diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py
index f424bf1200..2b185720a6 100644
--- a/pype/plugins/nuke/publish/extract_render_local.py
+++ b/pype/plugins/nuke/publish/extract_render_local.py
@@ -21,7 +21,6 @@ class NukeRenderLocal(pype.api.Extractor):
def process(self, instance):
node = instance[0]
- context = instance.context
self.log.debug("instance collected: {}".format(instance.data))
@@ -29,12 +28,6 @@ class NukeRenderLocal(pype.api.Extractor):
last_frame = instance.data.get("endFrame", None)
node_subset_name = instance.data.get("name", None)
- # swap path to stageDir
- temp_dir = self.staging_dir(instance).replace("\\", "/")
- output_dir = instance.data.get("outputDir")
- path = node['file'].value()
- node['file'].setValue(path.replace(output_dir, temp_dir))
-
self.log.info("Starting render")
self.log.info("Start frame: {}".format(first_frame))
self.log.info("End frame: {}".format(last_frame))
@@ -46,27 +39,26 @@ class NukeRenderLocal(pype.api.Extractor):
int(last_frame)
)
- # swap path back to publish path
path = node['file'].value()
- node['file'].setValue(path.replace(temp_dir, output_dir))
+ out_dir = os.path.dirname(path)
ext = node["file_type"].value()
if "representations" not in instance.data:
instance.data["representations"] = []
- collected_frames = os.listdir(temp_dir)
+ collected_frames = os.listdir(out_dir)
repre = {
'name': ext,
'ext': ext,
'files': collected_frames,
- "stagingDir": temp_dir,
+ "stagingDir": out_dir,
"anatomy_template": "render"
}
instance.data["representations"].append(repre)
self.log.info("Extracted instance '{0}' to: {1}".format(
instance.name,
- temp_dir
+ out_dir
))
instance.data['family'] = 'render'
diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review_data.py
similarity index 64%
rename from pype/plugins/nuke/publish/extract_review.py
rename to pype/plugins/nuke/publish/extract_review_data.py
index bdbd3d17a6..f5a017321e 100644
--- a/pype/plugins/nuke/publish/extract_review.py
+++ b/pype/plugins/nuke/publish/extract_review_data.py
@@ -2,10 +2,9 @@ import os
import nuke
import pyblish.api
import pype
-from pype.vendor import ffmpeg
-class ExtractDataForReview(pype.api.Extractor):
+class ExtractReviewData(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
@@ -13,8 +12,7 @@ class ExtractDataForReview(pype.api.Extractor):
"""
order = pyblish.api.ExtractorOrder + 0.01
- label = "Extract Review"
- optional = True
+ label = "Extract Review Data"
families = ["review"]
hosts = ["nuke"]
@@ -35,63 +33,15 @@ class ExtractDataForReview(pype.api.Extractor):
if "still" not in instance.data["families"]:
self.render_review_representation(instance,
representation="mov")
- self.log.debug("review mov:")
- self.transcode_mov(instance)
- self.log.debug("instance.data: {}".format(instance.data))
self.render_review_representation(instance,
representation="jpeg")
else:
- self.log.debug("instance: {}".format(instance))
self.render_review_representation(instance, representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
- def transcode_mov(self, instance):
- collection = instance.data["collection"]
- stagingDir = instance.data["stagingDir"].replace("\\", "/")
- file_name = collection.format("{head}mov")
-
- review_mov = os.path.join(stagingDir, file_name).replace("\\", "/")
-
- self.log.info("transcoding review mov: {0}".format(review_mov))
- if instance.data.get("baked_colorspace_movie"):
- input_movie = instance.data["baked_colorspace_movie"]
- out, err = (
- ffmpeg
- .input(input_movie)
- .output(
- review_mov,
- pix_fmt='yuv420p',
- crf=18,
- timecode="00:00:00:01"
- )
- .overwrite_output()
- .run()
- )
-
- self.log.debug("Removing `{0}`...".format(
- instance.data["baked_colorspace_movie"]))
- os.remove(instance.data["baked_colorspace_movie"])
-
- if "representations" not in instance.data:
- instance.data["representations"] = []
-
- representation = {
- 'name': 'review',
- 'ext': 'mov',
- 'files': file_name,
- "stagingDir": stagingDir,
- "anatomy_template": "render",
- "thumbnail": False,
- "preview": True,
- 'startFrameReview': instance.data['startFrame'],
- 'endFrameReview': instance.data['endFrame'],
- 'frameRate': instance.context.data["framerate"]
- }
- instance.data["representations"].append(representation)
-
def render_review_representation(self,
instance,
representation="mov"):
@@ -132,15 +82,20 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(node)
reformat_node = nuke.createNode("Reformat")
- reformat_node["format"].setValue("HD_1080")
- reformat_node["resize"].setValue("fit")
- reformat_node["filter"].setValue("Lanczos6")
- reformat_node["black_outside"].setValue(True)
+
+ ref_node = self.nodes.get("Reformat", None)
+ if ref_node:
+            for k, v in ref_node.items():
+                self.log.debug("k, v: {0}:{1}".format(k, v))
+ if isinstance(v, unicode):
+ v = str(v)
+ reformat_node[k].setValue(v)
+
reformat_node.setInput(0, previous_node)
previous_node = reformat_node
temporary_nodes.append(reformat_node)
- viewer_process_node = nuke.ViewerProcess.node()
+ viewer_process_node = instance.context.data.get("ViewerProcess")
dag_node = None
if viewer_process_node:
dag_node = nuke.createNode(viewer_process_node.Class())
@@ -162,6 +117,7 @@ class ExtractDataForReview(pype.api.Extractor):
if representation in "mov":
file = fhead + "baked.mov"
+ name = "baked"
path = os.path.join(stagingDir, file).replace("\\", "/")
self.log.debug("Path: {}".format(path))
instance.data["baked_colorspace_movie"] = path
@@ -170,11 +126,11 @@ class ExtractDataForReview(pype.api.Extractor):
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
- thumbnail = False
- preview = True
+ tags = ["review", "delete"]
elif representation in "jpeg":
file = fhead + "jpeg"
+ name = "thumbnail"
path = os.path.join(stagingDir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
@@ -182,31 +138,29 @@ class ExtractDataForReview(pype.api.Extractor):
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
- thumbnail = True
- preview = False
+ tags = ["thumbnail"]
# retime for
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
- # add into files for integration as representation
- if "representations" not in instance.data:
- instance.data["representations"] = []
-
- repre = {
- 'name': representation,
- 'ext': representation,
- 'files': file,
- "stagingDir": stagingDir,
- "anatomy_template": "render",
- "thumbnail": thumbnail,
- "preview": preview
- }
- instance.data["representations"].append(repre)
+ repre = {
+ 'name': name,
+ 'ext': representation,
+ 'files': file,
+ "stagingDir": stagingDir,
+ "startFrame": first_frame,
+ "endFrame": last_frame,
+ "anatomy_template": "render",
+ "tags": tags
+ }
+ instance.data["representations"].append(repre)
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
+ self.log.debug("representations: {}".format(instance.data["representations"]))
+
# Clean up
for node in temporary_nodes:
nuke.delete(node)
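
The hard-coded Reformat knob values give way to a preset lookup, so review output follows studio configuration. A sketch of the assumed preset shape driving that loop; the knob values mirror the ones the diff removes, and `self.nodes` is assumed to be a plain dict loaded from presets:

    nodes_preset = {
        "Reformat": {
            "format": "HD_1080",
            "resize": "fit",
            "filter": "Lanczos6",
            "black_outside": True,
        }
    }
    ref_node = nodes_preset.get("Reformat")
    if ref_node:
        # reformat_node is the node created by nuke.createNode("Reformat")
        for knob, value in ref_node.items():
            reformat_node[knob].setValue(value)
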
diff --git a/pype/plugins/nuke/publish/validate_active_viewer.py b/pype/plugins/nuke/publish/validate_active_viewer.py
new file mode 100644
index 0000000000..bcf7cab6b3
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_active_viewer.py
@@ -0,0 +1,18 @@
+import pyblish.api
+import nuke
+
+
+class ValidateActiveViewer(pyblish.api.ContextPlugin):
+ """Validate presentse of the active viewer from nodes
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Active Viewer"
+ hosts = ["nuke"]
+
+ def process(self, context):
+ viewer_process_node = context.data.get("ViewerProcess")
+
+ assert viewer_process_node, (
+ "Missing active viewer process! Please click on output write node and push key number 1-9"
+ )
diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py
index 841001ef43..93eb84f304 100644
--- a/pype/plugins/nuke/publish/validate_rendered_frames.py
+++ b/pype/plugins/nuke/publish/validate_rendered_frames.py
@@ -11,9 +11,12 @@ class RepairCollectionAction(pyblish.api.Action):
icon = "wrench"
def process(self, context, plugin):
-
+        self.log.debug("instance: {}".format(context[0]))
files_remove = [os.path.join(context[0].data["outputDir"], f)
- for f in context[0].data["files"]]
+ for r in context[0].data.get("representations", [])
+ for f in r.get("files", [])
+ ]
+        self.log.debug("files to remove: {}".format(files_remove))
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
@@ -38,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
if not repre.get('files'):
msg = ("no frames were collected, "
"you need to render them")
- self.log.error(msg)
+ self.log.warning(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])
diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py
index 4ad76b898b..efb0537246 100644
--- a/pype/plugins/nuke/publish/validate_script.py
+++ b/pype/plugins/nuke/publish/validate_script.py
@@ -24,7 +24,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
# These attributes will be checked
attributes = [
"fps", "fstart", "fend",
- "resolution_width", "resolution_height", "pixel_aspect", "handle_start", "handle_end"
+ "resolution_width", "resolution_height", "handle_start", "handle_end"
]
# Value of these attributes can be found on parents
diff --git a/pype/plugins/nukestudio/publish/collect_handles.py b/pype/plugins/nukestudio/publish/collect_handles.py
index 104a60d02c..03652989b8 100644
--- a/pype/plugins/nukestudio/publish/collect_handles.py
+++ b/pype/plugins/nukestudio/publish/collect_handles.py
@@ -41,7 +41,7 @@ class CollectClipHandles(api.ContextPlugin):
})
for instance in filtered_instances:
- if not instance.data.get("main") or not instance.data.get("handleTag"):
+ if not instance.data.get("main") and not instance.data.get("handleTag"):
self.log.debug("Synchronize handles on: `{}`".format(
instance.data["name"]))
name = instance.data["asset"]
diff --git a/pype/plugins/nukestudio/publish/collect_tag_types.py b/pype/plugins/nukestudio/publish/collect_tag_main.py
similarity index 86%
rename from pype/plugins/nukestudio/publish/collect_tag_types.py
rename to pype/plugins/nukestudio/publish/collect_tag_main.py
index fad9e54735..36d9b95554 100644
--- a/pype/plugins/nukestudio/publish/collect_tag_types.py
+++ b/pype/plugins/nukestudio/publish/collect_tag_main.py
@@ -5,7 +5,7 @@ class CollectClipTagTypes(api.InstancePlugin):
"""Collect Types from Tags of selected track items."""
order = api.CollectorOrder + 0.012
- label = "Collect Plate Type from Tag"
+ label = "Collect main flag"
hosts = ["nukestudio"]
families = ['clip']
@@ -25,7 +25,8 @@ class CollectClipTagTypes(api.InstancePlugin):
t_subset.capitalize())
if "plateMain" in subset_name:
- instance.data["main"] = True
+ if not instance.data.get("main"):
+ instance.data["main"] = True
self.log.info("`plateMain` found in instance.name: `{}`".format(
instance.data["name"]))
return
diff --git a/pype/templates.py b/pype/templates.py
index 5a31e2af45..9fe2e8c68c 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -28,7 +28,7 @@ def get_version_from_path(file):
v: version number in string ('001')
"""
- pattern = re.compile(r"_v([0-9]*)")
+ pattern = re.compile(r"[\._]v([0-9]*)")
try:
v = pattern.findall(file)[0]
return v
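
The widened pattern now accepts both `_v###` and `.v###` version markers (inside a character class the dot needs no escape, but the escaped form is harmless). A quick check of the behaviour:

    import re

    pattern = re.compile(r"[\._]v([0-9]*)")
    print(pattern.findall("shot010_v003.ma"))    # ['003']
    print(pattern.findall("plate.v012.exr"))     # ['012']
    # get_version_from_path returns findall(...)[0], i.e. the first marker
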
diff --git a/res/ftrack/action_icons/SyncHierarchicalAttrs.svg b/res/ftrack/action_icons/SyncHierarchicalAttrs.svg
index 0c59189168..8b7953299f 100644
--- a/res/ftrack/action_icons/SyncHierarchicalAttrs.svg
+++ b/res/ftrack/action_icons/SyncHierarchicalAttrs.svg
@@ -1 +1,9 @@
-
\ No newline at end of file
+
+
diff --git a/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg b/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg
new file mode 100644
index 0000000000..f58448ac06
--- /dev/null
+++ b/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg
@@ -0,0 +1 @@
+
diff --git a/setup/maya/userSetup.py b/setup/maya/userSetup.py
index 7b06fe7f33..b419e9d27e 100644
--- a/setup/maya/userSetup.py
+++ b/setup/maya/userSetup.py
@@ -1,18 +1,25 @@
import os
-import sys
from pypeapp import config
-from pype.maya import lib
+import pype.maya.lib as mlib
from maya import cmds
-def build_shelf():
- presets = config.get_presets()
- shelf_preset = presets['maya'].get('project_shelf')
- if shelf_preset:
- project = os.environ["AVALON_PROJECT"]
- for k, v in shelf_preset['imports'].items():
- sys.modules[k] = __import__(v, fromlist=[project])
+print("starting PYPE usersetup")
- lib.shelf(name=shelf_preset['name'], preset=shelf_preset)
+# build a shelf
+presets = config.get_presets()
+shelf_preset = presets['maya'].get('project_shelf')
-cmds.evalDeferred("build_shelf()")
+
+if shelf_preset:
+ project = os.environ["AVALON_PROJECT"]
+
+ for i in shelf_preset['imports']:
+ import_string = "from {} import {}".format(project, i)
+ print(import_string)
+ exec(import_string)
+
+    # register the deferred shelf build only when a preset exists
+    cmds.evalDeferred(
+        "mlib.shelf(name=shelf_preset['name'], preset=shelf_preset)")
+
+
+print("finished PYPE usersetup")
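
`evalDeferred` evaluates its string argument once Maya is idle, so `mlib` and `shelf_preset` must still be resolvable as globals at that point. A slightly safer equivalent, assuming the same userSetup globals, passes a callable instead of a string:

    def _build_shelf():
        # shelf_preset is non-None because registration happens inside
        # the `if shelf_preset:` block
        mlib.shelf(name=shelf_preset['name'], preset=shelf_preset)

    cmds.evalDeferred(_build_shelf)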