Merge branch 'develop' into hotfix/publish-plugin-improvements

Jakub Jezek 2019-11-20 09:26:00 +01:00
commit ecc3716d1b
38 changed files with 5138 additions and 1419 deletions

View file

@@ -1,2 +1,2 @@
 from .lib import *
-from .ftrack_server import FtrackServer
+from .ftrack_server import FtrackServer, check_ftrack_url

View file

@@ -277,10 +277,7 @@ class DeleteAsset(BaseAction):
                 'message': 'No entities to delete in avalon'
             }
 
-        or_subquery = []
-        for id in all_ids:
-            or_subquery.append({'_id': id})
-        delete_query = {'$or': or_subquery}
+        delete_query = {'_id': {'$in': all_ids}}
 
         self.db.delete_many(delete_query)
         return {

View file

@@ -97,10 +97,7 @@ class AssetsRemover(BaseAction):
                 'message': 'None of assets'
             }
 
-        or_subquery = []
-        for id in all_ids:
-            or_subquery.append({'_id': id})
-        delete_query = {'$or': or_subquery}
+        delete_query = {'_id': {'$in': all_ids}}
 
         self.db.delete_many(delete_query)
         self.db.uninstall()
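
Note on the two hunks above: both delete actions now issue a single `$in` query instead of building one `$or` sub-query per id. A minimal pymongo sketch of the two equivalent forms (the collection name and id list are illustrative, not taken from the repository):

    # old: one {'_id': ...} clause per id, wrapped in $or
    delete_query = {'$or': [{'_id': _id} for _id in all_ids]}

    # new: a single $in match on _id
    delete_query = {'_id': {'$in': all_ids}}

    collection.delete_many(delete_query)

Both match the same documents; the `$in` form keeps the query document small when `all_ids` is long.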

View file

@@ -108,6 +108,7 @@ class JobKiller(BaseAction):
                     'Changing Job ({}) status: {} -> failed'
                 ).format(job['id'], origin_status))
             except Exception:
+                session.rollback()
                 self.log.warning((
                     'Changing Job ({}) has failed'
                 ).format(job['id']))
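
The `session.rollback()` added in this except block (and in several handlers further down) follows the usual ftrack_api recovery pattern: a failed commit leaves the recorded operations in the session, so every later commit would fail again unless the session is rolled back first. A minimal sketch of the pattern, assuming an `ftrack_api.Session` named `session` and a logger named `log`:

    try:
        job['status'] = 'failed'
        session.commit()
    except Exception:
        # discard the pending operations so the session stays usable
        session.rollback()
        log.warning('Could not update job status', exc_info=True)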

View file

@ -1,351 +0,0 @@
import os
import sys
import json
import argparse
import logging
import collections
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from bson.objectid import ObjectId
class SyncHierarchicalAttrs(BaseAction):
db_con = DbConnector()
ca_mongoid = lib.get_ca_mongoid()
#: Action identifier.
identifier = 'sync.hierarchical.attrs.local'
#: Action label.
label = "Pype Admin"
variant = '- Sync Hier Attrs (Local)'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
def discover(self, session, entities, event):
''' Validation '''
for entity in entities:
if (
entity.get('context_type', '').lower() in ('show', 'task') and
entity.entity_type.lower() != 'task'
):
return True
return False
def launch(self, session, entities, event):
self.interface_messages = {}
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Hierachical attributes'
})
})
session.commit()
self.log.debug('Job with id "{}" created'.format(job['id']))
process_session = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
try:
# Collect hierarchical attrs
self.log.debug('Collecting Hierarchical custom attributes started')
custom_attributes = {}
all_avalon_attr = process_session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
error_key = (
'Hierarchical attributes with set "default" value (not allowed)'
)
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
if not cust_attr['is_hierarchical']:
continue
if cust_attr['default']:
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
cust_attr['label']
)
self.log.warning((
'Custom attribute "{}" has set default value.'
' This attribute can\'t be synchronized'
).format(cust_attr['label']))
continue
custom_attributes[cust_attr['key']] = cust_attr
self.log.debug(
'Collecting Hierarchical custom attributes has finished'
)
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
return {
'success': True,
'message': msg
}
entity = entities[0]
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
else:
project_name = entity['project']['full_name']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
_entities = self._get_entities(event, process_session)
for entity in _entities:
self.log.debug(30*'-')
self.log.debug(
'Processing entity "{}"'.format(entity.get('name', entity))
)
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
for key in custom_attributes:
self.log.debug(30*'*')
self.log.debug(
'Processing Custom attribute key "{}"'.format(key)
)
# check if entity has that attribute
if key not in entity['custom_attributes']:
error_key = 'Missing key on entities'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.error((
'- key "{}" not found on "{}"'
).format(key, ent_name))
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
error_key = (
'Missing value for key on entity'
' and its parents (synchronization was skipped)'
)
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.warning((
'- key "{}" not set on "{}" or its parents'
).format(key, ent_name))
continue
self.update_hierarchical_attribute(entity, key, value)
job['status'] = 'done'
session.commit()
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
exc_info=True
)
finally:
self.db_con.uninstall()
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
title = "Errors during SyncHierarchicalAttrs"
self.show_interface_from_dict(
messages=self.interface_messages, title=title, event=event
)
return True
def get_hierarchical_value(self, key, entity):
value = entity['custom_attributes'][key]
if (
value is not None or
entity.entity_type.lower() == 'project'
):
return value
return self.get_hierarchical_value(key, entity['parent'])
def update_hierarchical_attribute(self, entity, key, value):
if (
entity['context_type'].lower() not in ('show', 'task') or
entity.entity_type.lower() == 'task'
):
return
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
hierarchy = '/'.join(
[a['name'] for a in entity.get('ancestors', [])]
)
if hierarchy:
hierarchy = '/'.join(
[entity['project']['full_name'], hierarchy, entity['name']]
)
elif entity.entity_type.lower() == 'project':
hierarchy = entity['full_name']
else:
hierarchy = '/'.join(
[entity['project']['full_name'], entity['name']]
)
self.log.debug('- updating entity "{}"'.format(hierarchy))
# collect entity's custom attributes
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
return
mongoid = custom_attributes.get(self.ca_mongoid)
if not mongoid:
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
ent_name
)
)
return
try:
mongoid = ObjectId(mongoid)
except Exception:
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
ent_name
)
)
return
# Find entity in Mongo DB
mongo_entity = self.db_con.find_one({'_id': mongoid})
if not mongo_entity:
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
ent_name, str(mongoid)
)
)
return
# Change value if entity has set it's own
entity_value = custom_attributes[key]
if entity_value is not None:
value = entity_value
data = mongo_entity.get('data') or {}
data[key] = value
self.db_con.update_many(
{'_id': mongoid},
{'$set': {'data': data}}
)
self.log.debug(
'-- stored value "{}"'.format(value)
)
for child in entity.get('children', []):
self.update_hierarchical_attribute(child, key, value)
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
SyncHierarchicalAttrs(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

File diff suppressed because it is too large

View file

@ -1,266 +0,0 @@
import os
import sys
import time
import argparse
import logging
import json
import collections
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib import avalon_sync as ftracklib
from pype.vendor.ftrack_api import session as fa_session
class SyncToAvalon(BaseAction):
'''
Synchronizing data action - from Ftrack to Avalon DB
Stores all information about entity.
- Name(string) - Most important information = identifier of entity
- Parent(ObjectId) - Avalon Project Id, if entity is not project itself
- Silo(string) - Last parent except project
- Data(dictionary):
- VisualParent(ObjectId) - Avalon Id of parent asset
- Parents(array of string) - All parent names except project
- Tasks(array of string) - Tasks on asset
- FtrackId(string)
- entityType(string) - entity's type on Ftrack
* All Custom attributes in group 'Avalon' which name don't start with 'avalon_'
* These information are stored also for all parents and children entities.
Avalon ID of asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
- action IS NOT creating this Custom attribute if doesn't exist
- run 'Create Custom Attributes' action or do it manually (Not recommended)
If Ftrack entity already has Custom Attribute 'avalon_mongo_id' that stores ID:
- name, parents and silo are checked -> shows error if are not exact the same
- after sync it is not allowed to change names or move entities
If ID in 'avalon_mongo_id' is empty string or is not found in DB:
- tries to find entity by name
- found:
- raise error if ftrackId/visual parent/parents are not same
- not found:
- Creates asset/project
'''
#: Action identifier.
identifier = 'sync.to.avalon.local'
#: Action label.
label = "Pype Admin"
variant = '- Sync To Avalon (Local)'
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
#: roles that are allowed to register this action
role_list = ['Pypeclub']
#: Action priority
priority = 200
project_query = (
"select full_name, name, custom_attributes"
", project_schema._task_type_schema.types.name"
" from Project where full_name is \"{}\""
)
entities_query = (
"select id, name, parent_id, link, custom_attributes"
" from TypedContext where project.full_name is \"{}\""
)
# Entity type names(lowered) that won't be synchronized with their children
ignore_entity_types = ["task", "milestone"]
def __init__(self, session, plugins_presets):
super(SyncToAvalon, self).__init__(session)
# reload utils on initialize (in case of server restart)
def discover(self, session, entities, event):
''' Validation '''
for entity in entities:
if entity.entity_type.lower() not in ['task', 'assetversion']:
return True
return False
def launch(self, session, entities, event):
time_start = time.time()
message = ""
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Avalon.'
})
})
session.commit()
try:
self.log.debug("Preparing entities for synchronization")
if entities[0].entity_type.lower() == "project":
ft_project_name = entities[0]["full_name"]
else:
ft_project_name = entities[0]["project"]["full_name"]
project_entities = session.query(
self.entities_query.format(ft_project_name)
).all()
ft_project = session.query(
self.project_query.format(ft_project_name)
).one()
entities_by_id = {}
entities_by_parent = collections.defaultdict(list)
entities_by_id[ft_project["id"]] = ft_project
for ent in project_entities:
entities_by_id[ent["id"]] = ent
entities_by_parent[ent["parent_id"]].append(ent)
importable = []
for ent_info in event["data"]["selection"]:
ent = entities_by_id[ent_info["entityId"]]
for link_ent_info in ent["link"]:
link_ent = entities_by_id[link_ent_info["id"]]
if (
ent.entity_type.lower() in self.ignore_entity_types or
link_ent in importable
):
continue
importable.append(link_ent)
def add_children(parent_id):
ents = entities_by_parent[parent_id]
for ent in ents:
if ent.entity_type.lower() in self.ignore_entity_types:
continue
if ent not in importable:
importable.append(ent)
add_children(ent["id"])
# add children of selection to importable
for ent_info in event["data"]["selection"]:
add_children(ent_info["entityId"])
# Check names: REGEX in schema/duplicates - raise error if found
all_names = []
duplicates = []
for entity in importable:
ftracklib.avalon_check_name(entity)
if entity.entity_type.lower() == "project":
continue
if entity['name'] in all_names:
duplicates.append("'{}'".format(entity['name']))
else:
all_names.append(entity['name'])
if len(duplicates) > 0:
# TODO Show information to user and return False
raise ValueError(
"Entity name duplication: {}".format(", ".join(duplicates))
)
# ----- PROJECT ------
avalon_project = ftracklib.get_avalon_project(ft_project)
custom_attributes = ftracklib.get_avalon_attr(session)
# Import all entities to Avalon DB
for entity in importable:
result = ftracklib.import_to_avalon(
session=session,
entity=entity,
ft_project=ft_project,
av_project=avalon_project,
custom_attributes=custom_attributes
)
# TODO better error handling
# maybe split into critical, warnings and messages?
if 'errors' in result and len(result['errors']) > 0:
job['status'] = 'failed'
session.commit()
ftracklib.show_errors(self, event, result['errors'])
return {
'success': False,
'message': "Sync to avalon FAILED"
}
if avalon_project is None:
if 'project' in result:
avalon_project = result['project']
job['status'] = 'done'
except ValueError as ve:
# TODO remove this part!!!!
job['status'] = 'failed'
message = str(ve)
self.log.error(
'Error during syncToAvalon: {}'.format(message),
exc_info=True
)
except Exception as e:
job['status'] = 'failed'
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
log_message = "{}/{}/Line: {}".format(
exc_type, fname, exc_tb.tb_lineno
)
self.log.error(
'Error during syncToAvalon: {}'.format(log_message),
exc_info=True
)
# TODO add traceback to message and show to user
message = (
'Unexpected Error'
' - Please check Log for more information'
)
finally:
if job['status'] in ['queued', 'running']:
job['status'] = 'failed'
session.commit()
time_end = time.time()
self.log.debug("Synchronization took \"{}\"".format(
str(time_end - time_start)
))
if job["status"] != "failed":
self.log.debug("Triggering Sync hierarchical attributes")
self.trigger_action("sync.hierarchical.attrs.local", event)
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {
'success': False,
'message': message
}
return {
'success': True,
'message': "Synchronization was successfull"
}
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
SyncToAvalon(session, plugins_presets).register()

View file

@@ -43,7 +43,7 @@ class ThumbToChildren(BaseAction):
                 'description': 'Push thumbnails to Childrens'
             })
         })
+        session.commit()
         try:
             for entity in entities:
                 thumbid = entity['thumbnail_id']
@@ -53,10 +53,11 @@ class ThumbToChildren(BaseAction):
             # inform the user that the job is done
             job['status'] = 'done'
-        except Exception:
+        except Exception as exc:
+            session.rollback()
             # fail the job if something goes wrong
             job['status'] = 'failed'
-            raise
+            raise exc
         finally:
             session.commit()

View file

@@ -40,9 +40,9 @@ class ThumbToParent(BaseAction):
             'status': 'running',
             'data': json.dumps({
                 'description': 'Push thumbnails to parents'
-                })
             })
+        })
+        session.commit()
         try:
             for entity in entities:
                 parent = None
@@ -74,10 +74,11 @@ class ThumbToParent(BaseAction):
             # inform the user that the job is done
             job['status'] = status or 'done'
-        except Exception as e:
+        except Exception as exc:
+            session.rollback()
             # fail the job if something goes wrong
             job['status'] = 'failed'
-            raise e
+            raise exc
         finally:
             session.commit()

View file

@ -1,383 +0,0 @@
import os
import sys
import json
import argparse
import logging
import collections
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.ftrack.lib.io_nonsingleton import DbConnector
from bson.objectid import ObjectId
class SyncHierarchicalAttrs(BaseAction):
db_con = DbConnector()
ca_mongoid = lib.get_ca_mongoid()
#: Action identifier.
identifier = 'sync.hierarchical.attrs'
#: Action label.
label = "Pype Admin"
variant = '- Sync Hier Attrs (Server)'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
os.environ.get(
'PYPE_STATICS_SERVER',
'http://localhost:{}'.format(
config.get_presets().get('services', {}).get(
'statics_server', {}
).get('default_port', 8021)
)
)
)
def register(self):
self.session.event_hub.subscribe(
'topic=ftrack.action.discover',
self._discover
)
self.session.event_hub.subscribe(
'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
self.identifier
),
self._launch
)
def discover(self, session, entities, event):
''' Validation '''
role_check = False
discover = False
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
for role in user['user_security_roles']:
if role['security_role']['name'] in role_list:
role_check = True
break
if role_check is True:
for entity in entities:
context_type = entity.get('context_type', '').lower()
if (
context_type in ('show', 'task') and
entity.entity_type.lower() != 'task'
):
discover = True
break
return discover
def launch(self, session, entities, event):
self.interface_messages = {}
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Hierachical attributes'
})
})
session.commit()
self.log.debug('Job with id "{}" created'.format(job['id']))
process_session = ftrack_api.Session(
server_url=session.server_url,
api_key=session.api_key,
api_user=session.api_user,
auto_connect_event_hub=True
)
try:
# Collect hierarchical attrs
self.log.debug('Collecting Hierarchical custom attributes started')
custom_attributes = {}
all_avalon_attr = process_session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
error_key = (
'Hierarchical attributes with set "default" value (not allowed)'
)
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
if not cust_attr['is_hierarchical']:
continue
if cust_attr['default']:
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
cust_attr['label']
)
self.log.warning((
'Custom attribute "{}" has set default value.'
' This attribute can\'t be synchronized'
).format(cust_attr['label']))
continue
custom_attributes[cust_attr['key']] = cust_attr
self.log.debug(
'Collecting Hierarchical custom attributes has finished'
)
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
return {
'success': True,
'message': msg
}
entity = entities[0]
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
else:
project_name = entity['project']['full_name']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
_entities = self._get_entities(event, process_session)
for entity in _entities:
self.log.debug(30*'-')
self.log.debug(
'Processing entity "{}"'.format(entity.get('name', entity))
)
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
for key in custom_attributes:
self.log.debug(30*'*')
self.log.debug(
'Processing Custom attribute key "{}"'.format(key)
)
# check if entity has that attribute
if key not in entity['custom_attributes']:
error_key = 'Missing key on entities'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.error((
'- key "{}" not found on "{}"'
).format(key, entity.get('name', entity)))
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
error_key = (
'Missing value for key on entity'
' and its parents (synchronization was skipped)'
)
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
self.interface_messages[error_key].append(
'- key: "{}" - entity: "{}"'.format(key, ent_name)
)
self.log.warning((
'- key "{}" not set on "{}" or its parents'
).format(key, ent_name))
continue
self.update_hierarchical_attribute(entity, key, value)
job['status'] = 'done'
session.commit()
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
exc_info=True
)
finally:
self.db_con.uninstall()
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
if self.interface_messages:
self.show_interface_from_dict(
messages=self.interface_messages,
title="something went wrong",
event=event
)
return True
def get_hierarchical_value(self, key, entity):
value = entity['custom_attributes'][key]
if (
value is not None or
entity.entity_type.lower() == 'project'
):
return value
return self.get_hierarchical_value(key, entity['parent'])
def update_hierarchical_attribute(self, entity, key, value):
if (
entity['context_type'].lower() not in ('show', 'task') or
entity.entity_type.lower() == 'task'
):
return
ent_name = entity.get('name', entity)
if entity.entity_type.lower() == 'project':
ent_name = entity['full_name']
hierarchy = '/'.join(
[a['name'] for a in entity.get('ancestors', [])]
)
if hierarchy:
hierarchy = '/'.join(
[entity['project']['full_name'], hierarchy, entity['name']]
)
elif entity.entity_type.lower() == 'project':
hierarchy = entity['full_name']
else:
hierarchy = '/'.join(
[entity['project']['full_name'], entity['name']]
)
self.log.debug('- updating entity "{}"'.format(hierarchy))
# collect entity's custom attributes
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
return
mongoid = custom_attributes.get(self.ca_mongoid)
if not mongoid:
error_key = 'Missing MongoID on entities (try SyncToAvalon first)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" is not synchronized to avalon. Skipping'.format(
ent_name
)
)
return
try:
mongoid = ObjectId(mongoid)
except Exception:
error_key = 'Invalid MongoID on entities (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" has stored invalid MongoID. Skipping'.format(
ent_name
)
)
return
# Find entity in Mongo DB
mongo_entity = self.db_con.find_one({'_id': mongoid})
if not mongo_entity:
error_key = 'Entities not found in Avalon DB (try SyncToAvalon)'
if error_key not in self.interface_messages:
self.interface_messages[error_key] = []
if ent_name not in self.interface_messages[error_key]:
self.interface_messages[error_key].append(ent_name)
self.log.warning(
'-- entity "{}" was not found in DB by id "{}". Skipping'.format(
ent_name, str(mongoid)
)
)
return
# Change value if entity has set it's own
entity_value = custom_attributes[key]
if entity_value is not None:
value = entity_value
data = mongo_entity.get('data') or {}
data[key] = value
self.db_con.update_many(
{'_id': mongoid},
{'$set': {'data': data}}
)
for child in entity.get('children', []):
self.update_hierarchical_attribute(child, key, value)
def register(session, plugins_presets):
'''Register plugin. Called when used as an plugin.'''
SyncHierarchicalAttrs(session, plugins_presets).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

File diff suppressed because it is too large

View file

@@ -80,10 +80,10 @@ class NextTaskUpdate(BaseEvent):
                     '>>> [ {} ] updated to [ Ready ]'
                 ).format(path))
             except Exception as e:
+                session.rollback()
                 self.log.warning((
                     '!!! [ {} ] status couldnt be set: [ {} ]'
-                ).format(path, e))
-                session.rollback()
+                ).format(path, str(e)), exc_info=True)
 
 
 def register(session, plugins_presets):

View file

@@ -2,7 +2,7 @@ from pype.vendor import ftrack_api
 from pype.ftrack import BaseEvent
 
 
-class Radio_buttons(BaseEvent):
+class RadioButtons(BaseEvent):
 
     ignore_me = True

View file

@@ -2,7 +2,7 @@ from pype.vendor import ftrack_api
 from pype.ftrack import BaseEvent, lib
 
 
-class Sync_to_Avalon(BaseEvent):
+class SyncToAvalon(BaseEvent):
 
     priority = 100
@@ -101,7 +101,7 @@ class Sync_to_Avalon(BaseEvent):
                 avalon_project = result['project']
 
         except Exception as e:
-            session.reset()  # reset session to clear it
+            session.rollback()  # reset session to clear it
             message = str(e)
             title = 'Hey You! Unknown Error has been raised! (*look below*)'
@@ -124,4 +124,4 @@ class Sync_to_Avalon(BaseEvent):
 
 def register(session, plugins_presets):
     '''Register plugin. Called when used as an plugin.'''
-    Sync_to_Avalon(session, plugins_presets).register()
+    SyncToAvalon(session, plugins_presets).register()

View file

@@ -5,7 +5,7 @@ from pype.vendor import ftrack_api
 from pype.ftrack import BaseEvent
 
 
-class Test_Event(BaseEvent):
+class TestEvent(BaseEvent):
 
     ignore_me = True
@@ -23,4 +23,4 @@ class Test_Event(BaseEvent):
 
 def register(session, plugins_presets):
     '''Register plugin. Called when used as an plugin.'''
-    Test_Event(session, plugins_presets).register()
+    TestEvent(session, plugins_presets).register()

View file

@@ -62,6 +62,7 @@ class VersionToTaskStatus(BaseEvent):
                 task['status'] = task_status
                 session.commit()
             except Exception as e:
+                session.rollback()
                 self.log.warning('!!! [ {} ] status couldnt be set:\
                     [ {} ]'.format(path, e))
             else:

View file

@@ -1 +1,2 @@
 from .ftrack_server import FtrackServer
+from .lib import check_ftrack_url

View file

@@ -9,11 +9,12 @@ import atexit
 import time
 from urllib.parse import urlparse
 
-import requests
-
 from pype.vendor import ftrack_api
 from pype.ftrack.lib import credentials
 from pype.ftrack.ftrack_server import FtrackServer
-from pype.ftrack.ftrack_server.lib import ftrack_events_mongo_settings
+from pype.ftrack.ftrack_server.lib import (
+    ftrack_events_mongo_settings, check_ftrack_url
+)
 import socket_thread
@@ -25,36 +26,6 @@ class MongoPermissionsError(Exception):
         super().__init__(message)
 
 
-def check_ftrack_url(url, log_errors=True):
-    """Checks if Ftrack server is responding"""
-    if not url:
-        print('ERROR: Ftrack URL is not set!')
-        return None
-
-    url = url.strip('/ ')
-
-    if 'http' not in url:
-        if url.endswith('ftrackapp.com'):
-            url = 'https://' + url
-        else:
-            url = 'https://{0}.ftrackapp.com'.format(url)
-    try:
-        result = requests.get(url, allow_redirects=False)
-    except requests.exceptions.RequestException:
-        if log_errors:
-            print('ERROR: Entered Ftrack URL is not accesible!')
-        return False
-
-    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
-        if log_errors:
-            print('ERROR: Entered Ftrack URL is not accesible!')
-        return False
-
-    print('DEBUG: Ftrack server {} is accessible.'.format(url))
-
-    return url
-
-
 def check_mongo_url(host, port, log_error=False):
     """Checks if mongo server is responding"""
     sock = None

View file

@@ -100,7 +100,10 @@ class FtrackServer:
                 log.warning(msg, exc_info=e)
 
         if len(register_functions_dict) < 1:
-            raise Exception
+            raise Exception((
+                "There are no events with register function."
+                " Registered paths: \"{}\""
+            ).format("| ".join(paths)))
 
         # Load presets for setting plugins
         key = "user"

View file

@@ -1,4 +1,5 @@
 import os
+import requests
 
 try:
     from urllib.parse import urlparse, parse_qs
 except ImportError:
@@ -66,3 +67,33 @@ def get_ftrack_event_mongo_info():
     url = "mongodb://{}{}{}{}".format(user_pass, socket_path, dab, auth)
 
     return url, database, collection
+
+
+def check_ftrack_url(url, log_errors=True):
+    """Checks if Ftrack server is responding"""
+    if not url:
+        print('ERROR: Ftrack URL is not set!')
+        return None
+
+    url = url.strip('/ ')
+
+    if 'http' not in url:
+        if url.endswith('ftrackapp.com'):
+            url = 'https://' + url
+        else:
+            url = 'https://{0}.ftrackapp.com'.format(url)
+    try:
+        result = requests.get(url, allow_redirects=False)
+    except requests.exceptions.RequestException:
+        if log_errors:
+            print('ERROR: Entered Ftrack URL is not accesible!')
+        return False
+
+    if (result.status_code != 200 or 'FTRACK_VERSION' not in result.headers):
+        if log_errors:
+            print('ERROR: Entered Ftrack URL is not accesible!')
+        return False
+
+    print('DEBUG: Ftrack server {} is accessible.'.format(url))
+
+    return url
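
With this move, `check_ftrack_url` lives in `pype.ftrack.ftrack_server.lib` and can be imported both by the server script above and by the tray module further down. A small usage sketch, assuming the `FTRACK_SERVER` environment variable used elsewhere in this diff:

    import os
    from pype.ftrack.ftrack_server.lib import check_ftrack_url

    url = check_ftrack_url(os.environ.get('FTRACK_SERVER'))
    if not url:
        raise RuntimeError('Ftrack server is not reachable')

The return contract is worth keeping in mind: the function returns the normalized URL on success, `None` when no URL is set, and `False` when the server does not answer with a valid Ftrack response.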

View file

@@ -128,6 +128,7 @@ class BaseHandler(object):
             try:
                 return func(*args, **kwargs)
             except Exception as exc:
+                self.session.rollback()
                 msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
                 self.log.error(msg, exc_info=True)
                 return {

View file

@@ -26,6 +26,7 @@ class BaseEvent(BaseHandler):
             try:
                 func(*args, **kwargs)
             except Exception as exc:
+                self.session.rollback()
                 self.log.error(
                     'Event "{}" Failed: {}'.format(
                         self.__class__.__name__, str(exc)

View file

@@ -50,6 +50,19 @@ class DbConnector(object):
         self._database = None
         self._is_installed = False
 
+    def __getitem__(self, key):
+        # gives direct access to collection withou setting `active_table`
+        return self._database[key]
+
+    def __getattribute__(self, attr):
+        # not all methods of PyMongo database are implemented with this it is
+        # possible to use them too
+        try:
+            return super(DbConnector, self).__getattribute__(attr)
+        except AttributeError:
+            cur_proj = self.Session["AVALON_PROJECT"]
+            return self._database[cur_proj].__getattribute__(attr)
+
     def install(self):
         """Establish a persistent connection to the database"""
         if self._is_installed:
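
The two added methods turn `DbConnector` into a thin proxy over PyMongo: `__getitem__` exposes any collection by name, and `__getattribute__` falls back to the collection of the active project whenever the wrapper itself does not define the attribute. A short usage sketch, assuming an installed connector with `Session['AVALON_PROJECT']` set (project and query values are illustrative):

    db_con.install()
    db_con.Session['AVALON_PROJECT'] = 'my_project'

    # direct access to another collection without changing the active table
    doc = db_con['other_project'].find_one({'type': 'asset'})

    # an attribute missing on DbConnector resolves on the active project's
    # collection, e.g. PyMongo's count_documents
    count = db_con.count_documents({'type': 'asset'})

Because the fallback only triggers on `AttributeError`, methods that `DbConnector` already defines keep their wrapped behaviour.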

View file

@@ -6,7 +6,7 @@ from Qt import QtCore, QtGui, QtWidgets
 from pype.vendor import ftrack_api
 from pypeapp import style
-from pype.ftrack import FtrackServer, credentials
+from pype.ftrack import FtrackServer, check_ftrack_url, credentials
 from . import login_dialog
 
 from pype import api as pype
@@ -24,7 +24,8 @@ class FtrackModule:
         self.thread_timer = None
 
         self.bool_logged = False
-        self.bool_action_server = False
+        self.bool_action_server_running = False
+        self.bool_action_thread_running = False
         self.bool_timer_event = False
 
     def show_login_widget(self):
@@ -74,28 +75,50 @@ class FtrackModule:
     # Actions part
     def start_action_server(self):
+        self.bool_action_thread_running = True
+        self.set_menu_visibility()
+
+        if (
+            self.thread_action_server is not None and
+            self.bool_action_thread_running is False
+        ):
+            self.stop_action_server()
+
         if self.thread_action_server is None:
             self.thread_action_server = threading.Thread(
                 target=self.set_action_server
             )
+            self.thread_action_server.daemon = True
             self.thread_action_server.start()
 
-        log.info("Ftrack action server launched")
-        self.bool_action_server = True
-        self.set_menu_visibility()
-
     def set_action_server(self):
-        try:
-            self.action_server.run_server()
-        except Exception as exc:
-            log.error(
-                "Ftrack Action server crashed! Please try to start again.",
-                exc_info=True
-            )
-            # TODO show message to user
-            self.bool_action_server = False
-            self.set_menu_visibility()
+        first_check = True
+        while self.bool_action_thread_running is True:
+            if not check_ftrack_url(os.environ['FTRACK_SERVER']):
+                if first_check:
+                    log.warning(
+                        "Could not connect to Ftrack server"
+                    )
+                    first_check = False
+                time.sleep(1)
+                continue
+
+            log.info(
+                "Connected to Ftrack server. Running actions session"
+            )
+            try:
+                self.bool_action_server_running = True
+                self.set_menu_visibility()
+
+                self.action_server.run_server()
+                if self.bool_action_thread_running:
+                    log.debug("Ftrack action server has stopped")
+            except Exception:
+                log.warning(
+                    "Ftrack Action server crashed. Trying to connect again",
+                    exc_info=True
+                )
+            self.bool_action_server_running = False
+            self.set_menu_visibility()
+            first_check = True
+
+        self.bool_action_thread_running = False
 
     def reset_action_server(self):
         self.stop_action_server()
@@ -103,16 +126,21 @@ class FtrackModule:
     def stop_action_server(self):
         try:
+            self.bool_action_thread_running = False
             self.action_server.stop_session()
             if self.thread_action_server is not None:
                 self.thread_action_server.join()
                 self.thread_action_server = None
 
-            log.info("Ftrack action server stopped")
-            self.bool_action_server = False
+            log.info("Ftrack action server was forced to stop")
+            self.bool_action_server_running = False
             self.set_menu_visibility()
-        except Exception as e:
-            log.error("During Killing action server: {0}".format(e))
+        except Exception:
+            log.warning(
+                "Error has happened during Killing action server",
+                exc_info=True
+            )
 
     # Definition of Tray menu
     def tray_menu(self, parent_menu):
@@ -158,6 +186,9 @@ class FtrackModule:
     def tray_start(self):
         self.validate()
 
+    def tray_exit(self):
+        self.stop_action_server()
+
     # Definition of visibility of each menu actions
     def set_menu_visibility(self):
@@ -170,9 +201,9 @@ class FtrackModule:
             self.stop_timer_thread()
             return
 
-        self.aRunActionS.setVisible(not self.bool_action_server)
-        self.aResetActionS.setVisible(self.bool_action_server)
-        self.aStopActionS.setVisible(self.bool_action_server)
+        self.aRunActionS.setVisible(not self.bool_action_thread_running)
+        self.aResetActionS.setVisible(self.bool_action_thread_running)
+        self.aStopActionS.setVisible(self.bool_action_thread_running)
 
         if self.bool_timer_event is False:
             self.start_timer_thread()
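
Design note on the tray changes above: the action server now runs in a daemon thread whose loop keeps pinging the Ftrack URL with `check_ftrack_url` and restarts the session after a crash, so a dropped connection leads to a reconnect instead of a permanently dead menu entry. A stripped-down, hypothetical sketch of that loop shape (no tray state, placeholder callables):

    import threading
    import time

    def run_forever(should_run, server_reachable, run_server):
        while should_run():
            if not server_reachable():
                time.sleep(1)    # wait for the server to come back
                continue
            try:
                run_server()     # blocks until the session stops or crashes
            except Exception:
                pass             # log and fall through to the next retry

    worker = threading.Thread(
        target=run_forever,
        args=(lambda: True, lambda: True, lambda: time.sleep(5))
    )
    worker.daemon = True         # do not keep the process alive on exit
    worker.start()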

View file

@@ -562,7 +562,7 @@ def get_subsets(asset_name,
         find_dict = {"type": "representation",
                      "parent": version_sel["_id"]}
 
-        filter_repr = {"$or": [{"name": repr} for repr in representations]}
+        filter_repr = {"name": {"$in": representations}}
 
         find_dict.update(filter_repr)
         repres_out = [i for i in io.find(find_dict)]

View file

@@ -1,5 +1,6 @@
 import os
 import sys
+import six
 
 import pyblish.api
 import clique
@@ -125,6 +126,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                     metadata=asset_metadata
                 )
             )
+        try:
+            session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            session.rollback()
+            six.reraise(tp, value, tb)
 
         # Adding metadata
         existing_asset_metadata = asset_entity["metadata"]
@@ -162,6 +169,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                     metadata=assetversion_metadata
                 )
             )
+        try:
+            session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            session.rollback()
+            six.reraise(tp, value, tb)
 
         # Adding metadata
         existing_assetversion_metadata = assetversion_entity["metadata"]
@@ -170,7 +183,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
 
         # Have to commit the version and asset, because location can't
         # determine the final location without.
-        session.commit()
+        try:
+            session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            session.rollback()
+            six.reraise(tp, value, tb)
 
         # Component
         # Get existing entity.
@@ -209,7 +227,12 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
                 session.delete(member)
                 del(member)
-            session.commit()
+            try:
+                session.commit()
+            except Exception:
+                tp, value, tb = sys.exc_info()
+                session.rollback()
+                six.reraise(tp, value, tb)
 
             # Reset members in memory
             if "members" in component_entity.keys():
@@ -320,4 +343,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             )
         else:
             # Commit changes.
-            session.commit()
+            try:
+                session.commit()
+            except Exception:
+                tp, value, tb = sys.exc_info()
+                session.rollback()
+                six.reraise(tp, value, tb)
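
Every commit in this integrator is now wrapped in the same try/except that rolls the session back and re-raises with the original traceback via `six.reraise` (six keeps the re-raise compatible with both Python 2 and 3). Should the pattern keep spreading, it could be factored into a tiny helper; a hedged sketch, not part of this commit:

    import sys
    import six

    def commit_or_rollback(session):
        """Commit an ftrack session, rolling back and re-raising on failure."""
        try:
            session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            session.rollback()
            six.reraise(tp, value, tb)

`six.reraise` re-raises the captured exception with its traceback, so the caller still sees the original failure while the session is left clean for further use.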

View file

@@ -1,4 +1,6 @@
+import sys
 import pyblish.api
+import six
 
 
 class IntegrateFtrackComments(pyblish.api.InstancePlugin):
@@ -21,4 +23,9 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin):
 
         entity["notes"].extend(notes)
 
-        session.commit()
+        try:
+            session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            session.rollback()
+            six.reraise(tp, value, tb)

View file

@@ -37,6 +37,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
 
         if instance.data.get('version'):
             version_number = int(instance.data.get('version'))
+        else:
+            raise ValueError("Instance version not set")
 
         family = instance.data['family'].lower()

View file

@@ -1,3 +1,6 @@
+import sys
+import six
+
 import pyblish.api
 
 from avalon import io
@@ -74,9 +77,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             # try to find if entity already exists
             else:
-                query = 'TypedContext where name is "{0}" and project.full_name is "{1}"'.format(
-                    entity_name, self.ft_project["full_name"]
-                )
+                query = (
+                    'TypedContext where name is "{0}" and '
+                    'project_id is "{1}"'
+                ).format(entity_name, self.ft_project["id"])
                 try:
                     entity = self.session.query(query).one()
                 except Exception:
@@ -106,7 +110,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             for instance in instances:
                 instance.data['ftrackEntity'] = entity
 
-            self.session.commit()
+            try:
+                self.session.commit()
+            except Exception:
+                tp, value, tb = sys.exc_info()
+                self.session.rollback()
+                six.reraise(tp, value, tb)
 
         # TASKS
         tasks = entity_data.get('tasks', [])
@@ -129,11 +138,21 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                     task_type=task,
                     parent=entity
                 )
-                self.session.commit()
+                try:
+                    self.session.commit()
+                except Exception:
+                    tp, value, tb = sys.exc_info()
+                    self.session.rollback()
+                    six.reraise(tp, value, tb)
 
         # Incoming links.
         self.create_links(entity_data, entity)
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         if 'childs' in entity_data:
             self.import_to_ftrack(
@@ -143,7 +162,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
         # Clear existing links.
         for link in entity.get("incoming_links", []):
             self.session.delete(link)
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         # Create new links.
         for input in entity_data.get("inputs", []):
@@ -179,7 +203,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
         self.log.info(self.task_types)
         task['type'] = self.task_types[task_type]
 
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         return task
@@ -188,6 +217,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             'name': name,
             'parent': parent
         })
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         return entity

View file

@@ -24,4 +24,4 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
         rootVersion = pype.get_version_from_path(filename)
         context.data['version'] = rootVersion
 
-        self.log.info('Scene Version: %s' % context.data('version'))
+        self.log.info('Scene Version: %s' % context.data.get('version'))

View file

@@ -1,18 +1,23 @@
 import os
 from os.path import getsize
 import logging
-import speedcopy
+import sys
 import clique
 import errno
 import pyblish.api
 from avalon import api, io
 from avalon.vendor import filelink
 
+# this is needed until speedcopy for linux is fixed
+if sys.platform == "win32":
+    from speedcopy import copyfile
+else:
+    from shutil import copyfile
+
 log = logging.getLogger(__name__)
 
 
 class IntegrateAssetNew(pyblish.api.InstancePlugin):
-    """Resolve any dependency issius
+    """Resolve any dependency issues
 
     This plug-in resolves any paths which, if not updated might break
     the published file.
@@ -474,7 +479,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         # copy file with speedcopy and check if size of files are simetrical
         while True:
-            speedcopy.copyfile(src, dst)
+            copyfile(src, dst)
             if str(getsize(src)) in str(getsize(dst)):
                 break
@@ -500,7 +505,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         subset_name = instance.data["subset"]
         self.log.info("Subset '%s' not found, creating.." % subset_name)
         self.log.debug("families. %s" % instance.data.get('families'))
-        self.log.debug("families. %s" % type(instance.data.get('families')))
+        self.log.debug(
+            "families. %s" % type(instance.data.get('families')))
 
         _id = io.insert_one({
             "schema": "pype:subset-3.0",

View file

@ -1,8 +1,9 @@
 import pyblish.api
 import os
 
 
 class ValidateTemplates(pyblish.api.ContextPlugin):
-    """Check if all templates were filed"""
+    """Check if all templates were filled"""
 
     label = "Validate Templates"
     order = pyblish.api.ValidatorOrder - 0.1
@@ -18,12 +19,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
                 "root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
                 "project": {"name": "D001_projectsx",
                             "code": "prjX"},
                 "ext": "exr",
                 "version": 3,
                 "task": "animation",
                 "asset": "sh001",
-                "hierarchy": "ep101/sq01/sh010"}
+                "app": "maya",
+                "hierarchy": "ep101/sq01/sh010"}
 
         anatomy_filled = anatomy.format(data)
         self.log.info(anatomy_filled)
@@ -31,11 +32,12 @@ class ValidateTemplates(pyblish.api.ContextPlugin):
         data = {"root": os.environ["PYPE_STUDIO_PROJECTS_PATH"],
                 "project": {"name": "D001_projectsy",
                             "code": "prjY"},
                 "ext": "abc",
                 "version": 1,
                 "task": "lookdev",
                 "asset": "bob",
-                "hierarchy": "ep101/sq01/bob"}
+                "app": "maya",
+                "hierarchy": "ep101/sq01/bob"}
 
         anatomy_filled = context.data["anatomy"].format(data)
         self.log.info(anatomy_filled["work"]["folder"])

View file

@@ -38,7 +38,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
             self.log.warning("Deadline REST API url not found.")
         else:
             argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
-            response = requests.get(argument)
+            response = self._requests_get(argument)
             if not response.ok:
                 self.log.warning("No pools retrieved")
             else:
@@ -135,7 +135,7 @@ class CreateRenderGlobals(avalon.maya.Creator):
                 'authToken': self._token
             }
             api_entry = '/api/pools/list'
-            response = requests.get(
+            response = self._requests_get(
                 self.MUSTER_REST_URL + api_entry, params=params)
             if response.status_code != 200:
                 if response.status_code == 401:

View file

@ -0,0 +1,50 @@
import pyblish.api
import maya.cmds as cmds
import pype.maya.action
class ValidateAssemblyName(pyblish.api.InstancePlugin):
""" Ensure Assembly name ends with `GRP`
Check if assembly name ends with `_GRP` string.
"""
label = "Validate Assembly Name"
order = pyblish.api.ValidatorOrder
families = ["assembly"]
actions = [pype.maya.action.SelectInvalidAction]
active = False
@classmethod
def get_invalid(cls, instance):
cls.log.info("Checking name of {}".format(instance.name))
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
# All children will be included in the extracted export so we also
# validate *all* descendents of the set members and we skip any
# intermediate shapes
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
content_instance = list(set(content_instance + descendants))
assemblies = cmds.ls(content_instance, assemblies=True, long=True)
invalid = []
for cr in assemblies:
if not cr.endswith('_GRP'):
cls.log.error("{} doesn't end with _GRP".format(cr))
invalid.append(cr)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found {} invalid named assembly "
"items".format(len(invalid)))

View file

@ -0,0 +1,98 @@
from maya import cmds
import pyblish.api
import pype.api
import pype.maya.action
import re
class ValidateModelName(pyblish.api.InstancePlugin):
"""Validate name of model
starts with (somename)_###_(materialID)_GEO
materialID must be present in list
padding number doesn't have limit
"""
optional = True
order = pype.api.ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
label = "Model Name"
actions = [pype.maya.action.SelectInvalidAction]
# path to shader names definitions
# TODO: move it to preset file
material_file = None
active = False
regex = '(.*)_(\\d)*_(.*)_(GEO)'
@classmethod
def get_invalid(cls, instance):
# find out if supplied transform is group or not
def is_group(groupName):
try:
children = cmds.listRelatives(groupName, children=True)
for child in children:
if not cmds.ls(child, transforms=True):
return False
return True
except:
return False
invalid = []
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
pass
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
trns = cmds.ls(descendants, long=False, type=('transform'))
# filter out groups
filter = [node for node in trns if not is_group(node)]
# load shader list file as utf-8
if cls.material_file:
shader_file = open(cls.material_file, "r")
shaders = shader_file.readlines()
shader_file.close()
# strip line endings from list
shaders = map(lambda s: s.rstrip(), shaders)
# compile regex for testing names
r = re.compile(cls.regex)
for obj in filter:
m = r.match(obj)
if m is None:
cls.log.error("invalid name on: {}".format(obj))
invalid.append(obj)
else:
# if we have shader files and shader named group is in
# regex, test this group against names in shader file
if 'shader' in r.groupindex and shaders:
try:
if not m.group('shader') in shaders:
cls.log.error(
"invalid materialID on: {0} ({1})".format(
obj, m.group('shader')))
invalid.append(obj)
except IndexError:
# shader named group doesn't match
cls.log.error(
"shader group doesn't match: {}".format(obj))
invalid.append(obj)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Model naming is invalid. See log.")

View file

@ -0,0 +1,78 @@
from maya import cmds
import pyblish.api
import pype.api
import pype.maya.action
import re
class ValidateShaderName(pyblish.api.InstancePlugin):
"""Validate shader name assigned.
It should be <assetName>_<*>_SHD
"""
optional = True
active = False
order = pype.api.ValidateContentsOrder
families = ["look"]
hosts = ['maya']
label = 'Validate Shaders Name'
actions = [pype.maya.action.SelectInvalidAction]
regex = r'(?P<asset>.*)_(.*)_SHD'
# The default connections to check
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError("Found shapes with invalid shader names "
"assigned: "
"\n{}".format(invalid))
@classmethod
def get_invalid(cls, instance):
invalid = []
# Get all shapes from the instance
content_instance = instance.data.get("setMembers", None)
if not content_instance:
cls.log.error("Instance has no nodes!")
return True
pass
descendants = cmds.listRelatives(content_instance,
allDescendents=True,
fullPath=True) or []
descendants = cmds.ls(descendants, noIntermediate=True, long=True)
shapes = cmds.ls(descendants, type=["nurbsSurface", "mesh"], long=True)
asset_name = instance.data.get("asset", None)
# Check the number of connected shadingEngines per shape
r = re.compile(cls.regex)
for shape in shapes:
shading_engines = cmds.listConnections(shape,
destination=True,
type="shadingEngine") or []
shaders = cmds.ls(
cmds.listConnections(shading_engines), materials=1
)
for shader in shaders:
m = r.match(cls.regex, shader)
if m is None:
invalid.append(shape)
cls.log.error(
"object {0} has invalid shader name {1}".format(shape,
shader)
)
else:
if 'asset' in r.groupindex:
if m.group('asset') != asset_name:
invalid.append(shape)
cls.log.error(("object {0} has invalid "
"shader name {1}").format(shape,
shader))
return invalid

View file

@@ -1,4 +1,6 @@
+import sys
 import pyblish.api
+import six
 
 
 class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
@@ -91,7 +93,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             for instance in instances:
                 instance.data['ftrackShotId'] = entity['id']
 
-            self.session.commit()
+            try:
+                self.session.commit()
+            except Exception:
+                tp, value, tb = sys.exc_info()
+                self.session.rollback()
+                six.reraise(tp, value, tb)
 
         # TASKS
         tasks = entity_data.get('tasks', [])
@@ -114,7 +121,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                     task_type=ftrack_types[task],
                     parent=entity
                 )
-                self.session.commit()
 
         if 'childs' in entity_data:
             self.import_to_ftrack(
@@ -141,7 +147,12 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
         self.log.info(self.task_types)
         task['type'] = self.task_types[task_type]
 
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         return task
@@ -150,6 +161,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             'name': name,
             'parent': parent
         })
-        self.session.commit()
+        try:
+            self.session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            self.session.rollback()
+            six.reraise(tp, value, tb)
 
         return entity

View file

@@ -1,6 +1,8 @@
+import sys
 import pyblish.api
 import pype.api
 import avalon.api
+import six
 
 
 class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
@@ -48,4 +50,9 @@ class ValidateAutoSyncOff(pyblish.api.ContextPlugin):
         session = context.data["ftrackSession"]
         invalid = cls.get_invalid(context)
         invalid['custom_attributes']['avalon_auto_sync'] = False
-        session.commit()
+        try:
+            session.commit()
+        except Exception:
+            tp, value, tb = sys.exc_info()
+            session.rollback()
+            six.reraise(tp, value, tb)