Merge branch 'develop' into bugfix/publish_filesequence

This commit is contained in:
Toke Jepsen 2019-07-21 16:34:53 +01:00
commit d734049a3d
52 changed files with 1708 additions and 755 deletions

View file

@@ -0,0 +1,334 @@
import os
import sys
import json
import argparse
import logging
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction
class CustomAttributeDoctor(BaseAction):
#: Action identifier.
identifier = 'custom.attributes.doctor'
#: Action label.
label = 'Custom Attributes Doctor'
#: Action description.
description = (
'Fix hierarchical custom attributes, mainly handles, fstart'
' and fend'
)
icon = '{}/ftrack/action_icons/TestAction.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend']
hierarchical_alternatives = {
'handle_start': 'handles',
'handle_end': 'handles'
}
# Roles for new custom attributes
read_roles = ['ALL',]
write_roles = ['ALL',]
data_ca = {
'handle_start': {
'label': 'Frame handles start',
'type': 'number',
'config': json.dumps({'isdecimal': False})
},
'handle_end': {
'label': 'Frame handles end',
'type': 'number',
'config': json.dumps({'isdecimal': False})
},
'fstart': {
'label': 'Frame start',
'type': 'number',
'config': json.dumps({'isdecimal': False})
},
'fend': {
'label': 'Frame end',
'type': 'number',
'config': json.dumps({'isdecimal': False})
}
}
def discover(self, session, entities, event):
''' Validation '''
return True
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
title = 'Select Project to fix Custom attributes'
items = []
item_splitter = {'type': 'label', 'value': '---'}
all_projects = session.query('Project').all()
for project in all_projects:
item_label = {
'type': 'label',
'value': '{} (<i>{}</i>)'.format(
project['full_name'], project['name']
)
}
item = {
'name': project['id'],
'type': 'boolean',
'value': False
}
if len(items) > 0:
items.append(item_splitter)
items.append(item_label)
items.append(item)
if len(items) == 0:
return {
'success': False,
'message': 'Didn\'t find any projects'
}
else:
return {
'items': items,
'title': title
}
def launch(self, session, entities, event):
if 'values' not in event['data']:
return
values = event['data']['values']
projects_to_update = []
for project_id, update_bool in values.items():
if not update_bool:
continue
project = session.query(
'Project where id is "{}"'.format(project_id)
).one()
projects_to_update.append(project)
if not projects_to_update:
self.log.debug('Nothing to update')
return {
'success': True,
'message': 'Nothing to update'
}
self.security_roles = {}
self.to_process = {}
# self.curent_default_values = {}
existing_attrs = session.query('CustomAttributeConfiguration').all()
self.prepare_custom_attributes(existing_attrs)
self.projects_data = {}
for project in projects_to_update:
self.process_data(project)
return True
def process_data(self, entity):
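# Copy each entity's value from the mapped source attribute (the old or
# alternative key) into the new hierarchical attribute, then recurse into
# the entity's children.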
cust_attrs = entity.get('custom_attributes')
if not cust_attrs:
return
for dst_key, src_key in self.to_process.items():
if src_key in cust_attrs:
value = cust_attrs[src_key]
entity['custom_attributes'][dst_key] = value
self.session.commit()
for child in entity.get('children', []):
self.process_data(child)
def prepare_custom_attributes(self, existing_attrs):
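# For every wanted hierarchical attribute: reuse values from a configured
# alternative (e.g. 'handles'), or rename an existing non-hierarchical
# attribute to '<key>_old' and recreate the key as hierarchical. The
# resulting 'to_process' maps new key -> source key for process_data.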
to_process = {}
to_create = []
all_keys = {attr['key']: attr for attr in existing_attrs}
for key in self.hierarchical_ca:
if key not in all_keys:
self.log.debug(
'Custom attribute "{}" does not exist at all'.format(key)
)
to_create.append(key)
if key in self.hierarchical_alternatives:
alt_key = self.hierarchical_alternatives[key]
if alt_key in all_keys:
self.log.debug((
'Custom attribute "{}" will use values from "{}"'
).format(key, alt_key))
to_process[key] = alt_key
obj = all_keys[alt_key]
# if alt_key not in self.curent_default_values:
# self.curent_default_values[alt_key] = obj['default']
obj['default'] = None
self.session.commit()
else:
obj = all_keys[key]
new_key = key + '_old'
if obj['is_hierarchical']:
if new_key not in all_keys:
self.log.info((
'Custom attribute "{}" is already hierarchical'
' and no old variant was found'
).format(key)
)
continue
to_process[key] = new_key
continue
# default_value = obj['default']
# if new_key not in self.curent_default_values:
# self.curent_default_values[new_key] = default_value
obj['key'] = new_key
obj['label'] = obj['label'] + '(old)'
obj['default'] = None
self.session.commit()
to_create.append(key)
to_process[key] = new_key
self.to_process = to_process
for key in to_create:
data = {
'key': key,
'entity_type': 'show',
'is_hierarchical': True,
'default': None
}
for _key, _value in self.data_ca.get(key, {}).items():
if _key == 'type':
_value = self.session.query((
'CustomAttributeType where name is "{}"'
).format(_value)).first()
data[_key] = _value
avalon_group = self.session.query(
'CustomAttributeGroup where name is "avalon"'
).first()
if avalon_group:
data['group'] = avalon_group
read_roles = self.get_security_role(self.read_roles)
write_roles = self.get_security_role(self.write_roles)
data['read_security_roles'] = read_roles
data['write_security_roles'] = write_roles
self.session.create('CustomAttributeConfiguration', data)
self.session.commit()
# def return_back_defaults(self):
# existing_attrs = self.session.query(
# 'CustomAttributeConfiguration'
# ).all()
#
# for attr_key, default in self.curent_default_values.items():
# for attr in existing_attrs:
# if attr['key'] != attr_key:
# continue
# attr['default'] = default
# self.session.commit()
# break
def get_security_role(self, security_roles):
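# 'security_roles' may be ['ALL'] (every role), ['except', <names>...]
# (every role but the named ones), or an explicit list of role names.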
roles = []
if len(security_roles) == 0 or security_roles[0] == 'ALL':
roles = self.get_role_ALL()
elif security_roles[0] == 'except':
excepts = security_roles[1:]
all_roles = self.get_role_ALL()
for role in all_roles:
if role['name'] not in excepts:
roles.append(role)
if role['name'] not in self.security_roles:
self.security_roles[role['name']] = role
else:
for role_name in security_roles:
if role_name in self.security_roles:
roles.append(self.security_roles[role_name])
continue
try:
query = 'SecurityRole where name is "{}"'.format(role_name)
role = self.session.query(query).one()
self.security_roles[role_name] = role
roles.append(role)
except Exception:
self.log.warning(
'Security role "{}" does not exist'.format(role_name)
)
continue
return roles
def get_role_ALL(self):
role_name = 'ALL'
if role_name in self.security_roles:
all_roles = self.security_roles[role_name]
else:
all_roles = self.session.query('SecurityRole').all()
self.security_roles[role_name] = all_roles
for role in all_roles:
if role['name'] not in self.security_roles:
self.security_roles[role['name']] = role
return all_roles
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
CustomAttributeDoctor(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@@ -1,5 +1,6 @@
import os
import sys
import json
import argparse
import logging
import collections
@@ -16,13 +17,13 @@ class SyncHierarchicalAttrs(BaseAction):
ca_mongoid = lib.get_ca_mongoid()
#: Action identifier.
identifier = 'sync.hierarchical.attrs'
identifier = 'sync.hierarchical.attrs.local'
#: Action label.
label = 'Sync hierarchical attributes'
label = 'Sync HierAttrs - Local'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
)
@@ -40,67 +41,92 @@ class SyncHierarchicalAttrs(BaseAction):
return False
def launch(self, session, entities, event):
# Collect hierarchical attrs
custom_attributes = {}
all_avalon_attr = session.query(
'CustomAttributeGroup where name is "avalon"'
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
if not cust_attr['is_hierarchical']:
continue
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Hierarchical attributes'
})
})
session.commit()
if cust_attr['default']:
self.log.warning((
'Custom attribute "{}" has a default value set.'
' This attribute can\'t be synchronized'
).format(cust_attr['label']))
continue
custom_attributes[cust_attr['key']] = cust_attr
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
return {
'success': True,
'message': msg
}
entity = entities[0]
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
else:
project_name = entity['project']['full_name']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
for entity in entities:
for key in custom_attributes:
# check if entity has that attribute
if key not in entity['custom_attributes']:
self.log.debug(
'Hierarchical attribute "{}" not found on "{}"'.format(
key, entity.get('name', entity)
)
)
try:
# Collect hierarchical attrs
custom_attributes = {}
all_avalon_attr = session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
self.log.warning(
'Hierarchical attribute "{}" not set on "{}"'.format(
key, entity.get('name', entity)
)
)
if not cust_attr['is_hierarchical']:
continue
self.update_hierarchical_attribute(entity, key, value)
if cust_attr['default']:
self.log.warning((
'Custom attribute "{}" has a default value set.'
' This attribute can\'t be synchronized'
).format(cust_attr['label']))
continue
self.db_con.uninstall()
custom_attributes[cust_attr['key']] = cust_attr
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
return {
'success': True,
'message': msg
}
entity = entities[0]
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
else:
project_name = entity['project']['full_name']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
for entity in entities:
for key in custom_attributes:
# check if entity has that attribute
if key not in entity['custom_attributes']:
self.log.debug(
'Hierarchical attribute "{}" not found on "{}"'.format(
key, entity.get('name', entity)
)
)
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
self.log.warning(
'Hierarchical attribute "{}" not set on "{}"'.format(
key, entity.get('name', entity)
)
)
continue
self.update_hierarchical_attribute(entity, key, value)
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
exc_info=True
)
finally:
self.db_con.uninstall()
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
return True

View file

@@ -6,6 +6,7 @@ import json
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib as ftracklib
from pype.vendor.ftrack_api import session as fa_session
class SyncToAvalon(BaseAction):
@@ -176,6 +177,18 @@ class SyncToAvalon(BaseAction):
job['status'] = 'failed'
session.commit()
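# Chain the local hierarchical-attributes sync by publishing a synthetic
# action.launch event carrying the same selection and user.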
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier='sync.hierarchical.attrs.local',
selection=event['data']['selection']
),
source=dict(
user=event['source']['user']
)
)
session.event_hub.publish(event, on_error='ignore')
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {

View file

@@ -0,0 +1,274 @@
import os
import sys
import json
import argparse
import logging
import collections
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
from bson.objectid import ObjectId
class SyncHierarchicalAttrs(BaseAction):
db_con = DbConnector()
ca_mongoid = lib.get_ca_mongoid()
#: Action identifier.
identifier = 'sync.hierarchical.attrs'
#: Action label.
label = 'Sync HierAttrs'
#: Action description.
description = 'Synchronize hierarchical attributes'
#: Icon
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
os.environ.get(
'PYPE_STATICS_SERVER',
'http://localhost:{}'.format(
config.get_presets().get('services', {}).get(
'statics_server', {}
).get('default_port', 8021)
)
)
)
def register(self):
self.session.event_hub.subscribe(
'topic=ftrack.action.discover',
self._discover
)
self.session.event_hub.subscribe(
'topic=ftrack.action.launch and data.actionIdentifier={}'.format(
self.identifier
),
self._launch
)
def discover(self, session, entities, event):
''' Validation '''
role_check = False
discover = False
role_list = ['Pypeclub', 'Administrator', 'Project Manager']
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
for role in user['user_security_roles']:
if role['security_role']['name'] in role_list:
role_check = True
break
print(self.icon)
if role_check is True:
for entity in entities:
context_type = entity.get('context_type', '').lower()
if (
context_type in ('show', 'task') and
entity.entity_type.lower() != 'task'
):
discover = True
break
return discover
def launch(self, session, entities, event):
user = session.query(
'User where id is "{}"'.format(event['source']['user']['id'])
).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Hierarchical attributes'
})
})
session.commit()
try:
# Collect hierarchical attrs
custom_attributes = {}
all_avalon_attr = session.query(
'CustomAttributeGroup where name is "avalon"'
).one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' in cust_attr['key']:
continue
if not cust_attr['is_hierarchical']:
continue
if cust_attr['default']:
self.log.warning((
'Custom attribute "{}" has a default value set.'
' This attribute can\'t be synchronized'
).format(cust_attr['label']))
continue
custom_attributes[cust_attr['key']] = cust_attr
if not custom_attributes:
msg = 'No hierarchical attributes to sync.'
self.log.debug(msg)
return {
'success': True,
'message': msg
}
entity = entities[0]
if entity.entity_type.lower() == 'project':
project_name = entity['full_name']
else:
project_name = entity['project']['full_name']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = project_name
for entity in entities:
for key in custom_attributes:
# check if entity has that attribute
if key not in entity['custom_attributes']:
self.log.debug(
'Hierarchical attribute "{}" not found on "{}"'.format(
key, entity.get('name', entity)
)
)
continue
value = self.get_hierarchical_value(key, entity)
if value is None:
self.log.warning(
'Hierarchical attribute "{}" not set on "{}"'.format(
key, entity.get('name', entity)
)
)
continue
self.update_hierarchical_attribute(entity, key, value)
except Exception:
self.log.error(
'Action "{}" failed'.format(self.label),
exc_info=True
)
finally:
self.db_con.uninstall()
if job['status'] in ('queued', 'running'):
job['status'] = 'failed'
session.commit()
return True
def get_hierarchical_value(self, key, entity):
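# Walk up the parent chain until a non-None value is found or the project
# level is reached.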
value = entity['custom_attributes'][key]
if (
value is not None or
entity.entity_type.lower() == 'project'
):
return value
return self.get_hierarchical_value(key, entity['parent'])
def update_hierarchical_attribute(self, entity, key, value):
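# Write the value into the entity's avalon document and recurse into its
# children; an entity's own non-None value overrides the inherited one.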
if (
entity['context_type'].lower() not in ('show', 'task') or
entity.entity_type.lower() == 'task'
):
return
# collect entity's custom attributes
custom_attributes = entity.get('custom_attributes')
if not custom_attributes:
return
mongoid = custom_attributes.get(self.ca_mongoid)
if not mongoid:
self.log.debug('Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
))
return
try:
mongoid = ObjectId(mongoid)
except Exception:
self.log.warning('Entity "{}" has an invalid MongoID stored.'.format(
entity.get('name', entity)
))
return
# Find entity in Mongo DB
mongo_entity = self.db_con.find_one({'_id': mongoid})
if not mongo_entity:
self.log.warning(
'Entity "{}" is not synchronized to avalon.'.format(
entity.get('name', entity)
)
)
return
# Change value if entity has set its own
entity_value = custom_attributes[key]
if entity_value is not None:
value = entity_value
data = mongo_entity.get('data') or {}
data[key] = value
self.db_con.update_many(
{'_id': mongoid},
{'$set': {'data': data}}
)
for child in entity.get('children', []):
self.update_hierarchical_attribute(child, key, value)
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncHierarchicalAttrs(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@@ -3,8 +3,11 @@ import sys
import argparse
import logging
import json
from pypeapp import config
from pype.vendor import ftrack_api
from pype.ftrack import BaseAction, lib
from pype.vendor.ftrack_api import session as fa_session
class Sync_To_Avalon(BaseAction):
@@ -50,7 +53,14 @@ class Sync_To_Avalon(BaseAction):
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format(
os.environ.get('PYPE_STATICS_SERVER', '')
os.environ.get(
'PYPE_STATICS_SERVER',
'http://localhost:{}'.format(
config.get_presets().get('services', {}).get(
'statics_server', {}
).get('default_port', 8021)
)
)
)
def register(self):
@@ -70,7 +80,7 @@ class Sync_To_Avalon(BaseAction):
''' Validation '''
roleCheck = False
discover = False
roleList = ['Administrator', 'Project Manager']
roleList = ['Pypeclub', 'Administrator', 'Project Manager']
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
@@ -191,6 +201,24 @@ class Sync_To_Avalon(BaseAction):
' - Please check Log for more information'
)
finally:
if job['status'] in ['queued', 'running']:
job['status'] = 'failed'
session.commit()
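# Chain the hierarchical-attributes sync by publishing a synthetic
# action.launch event carrying the same selection and user.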
event = fa_session.ftrack_api.event.base.Event(
topic='ftrack.action.launch',
data=dict(
actionIdentifier='sync.hierarchical.attrs',
selection=event['data']['selection']
),
source=dict(
user=event['source']['user']
)
)
session.event_hub.publish(event, on_error='ignore')
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {

View file

@@ -0,0 +1,239 @@
from pype.vendor import ftrack_api
from pype.ftrack import BaseEvent, lib
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
from bson.objectid import ObjectId
from pypeapp import config
from pypeapp import Anatomy
import subprocess
import os
import re
class UserAssigmentEvent(BaseEvent):
"""
This script intercepts user assignment / de-assignment events and runs a
shell script, providing as much context as possible.
It expects a configuration file ``presets/ftrack/user_assigment_event.json``.
In it, you define paths to scripts to be run for the user assignment event
and for de-assignment::
{
"add": [
"/path/to/script1",
"/path/to/script2"
],
"remove": [
"/path/to/script3",
"/path/to/script4"
]
}
Those scripts are executed in a shell. Three arguments will be passed
to them:
1) user name of user (de)assigned
2) path to workfiles of task user was (de)assigned to
3) path to publish files of task user was (de)assigned to
"""
db_con = DbConnector()
ca_mongoid = lib.get_ca_mongoid()
def error(self, *err):
for e in err:
self.log.error(e)
def _run_script(self, script, args):
"""
Run shell script with arguments as subprocess
:param script: script path
:type script: str
:param args: list of arguments passed to script
:type args: list
:returns: return code
:rtype: int
"""
# Pass a flat argument list; nesting `args` inside the list (as before)
# would hand the script a single list object instead of separate arguments.
p = subprocess.call([script] + args)
return p
def _get_task_and_user(self, session, action, changes):
"""
Get Task and User entities from Ftrack session
:param session: ftrack session
:type session: ftrack_api.session
:param action: event action
:type action: str
:param changes: what was changed by event
:type changes: dict
:returns: User and Task entities
:rtype: tuple
"""
if not changes:
return None, None
if action == 'add':
task_id = changes.get('context_id', {}).get('new')
user_id = changes.get('resource_id', {}).get('new')
elif action == 'remove':
task_id = changes.get('context_id', {}).get('old')
user_id = changes.get('resource_id', {}).get('old')
else:
# guard against unknown actions so task_id/user_id are never unbound
return None, None
if not task_id:
return None, None
if not user_id:
return None, None
task = session.query('Task where id is "{}"'.format(task_id)).one()
user = session.query('User where id is "{}"'.format(user_id)).one()
return task, user
def _get_asset(self, task):
"""
Get asset from task entity
:param task: Task entity
:type task: dict
:returns: Asset entity
:rtype: dict
"""
parent = task['parent']
self.db_con.install()
self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
avalon_entity = None
parent_id = parent['custom_attributes'].get(self.ca_mongoid)
if parent_id:
parent_id = ObjectId(parent_id)
avalon_entity = self.db_con.find_one({
'_id': parent_id,
'type': 'asset'
})
if not avalon_entity:
avalon_entity = self.db_con.find_one({
'type': 'asset',
'name': parent['name']
})
if not avalon_entity:
self.db_con.uninstall()
msg = 'Entity "{}" not found in avalon database'.format(
parent['name']
)
self.error(msg)
return {
'success': False,
'message': msg
}
self.db_con.uninstall()
return avalon_entity
def _get_hierarchy(self, asset):
"""
Get hierarchy from Asset entity
:param asset: Asset entity
:type asset: dict
:returns: hierarchy string
:rtype: str
"""
return asset['data']['hierarchy']
def _get_template_data(self, task):
"""
Get data to fill template from task
.. seealso:: :mod:`pypeapp.Anatomy`
:param task: Task entity
:type task: dict
:returns: data for anatomy template
:rtype: dict
"""
project_name = task['project']['full_name']
project_code = task['project']['name']
try:
root = os.environ['PYPE_STUDIO_PROJECTS_PATH']
except KeyError:
msg = 'Project ({}) root not set'.format(project_name)
self.log.error(msg)
return {
'success': False,
'message': msg
}
# fill in template data
asset = self._get_asset(task)
t_data = {
'root': root,
'project': {
'name': project_name,
'code': project_code
},
'asset': asset['name'],
'task': task['name'],
'hierarchy': self._get_hierarchy(asset)
}
return t_data
def launch(self, session, event):
# load shell scripts presets
presets = config.get_presets()['ftrack']["user_assigment_event"]
if not presets:
return
for entity in event.get('data', {}).get('entities', []):
if entity.get('entity_type') != 'Appointment':
continue
task, user = self._get_task_and_user(session,
entity.get('action'),
entity.get('changes'))
if not task or not user:
self.log.error(
'Task or User was not found.')
continue
data = self._get_template_data(task)
# format directories to pass to shell script
anatomy = Anatomy(data["project"]["name"])
# formatting work dir is easiest part as we can use whole path
work_dir = anatomy.format(data)['avalon']['work']
# we also need publish but not whole
publish = anatomy.format_all(data)['partial']['avalon']['publish']
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),
publish)
if not m:
msg = 'Cannot get part of publish path {}'.format(publish)
self.log.error(msg)
return {
'success': False,
'message': msg
}
publish_dir = m.group(1)
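# e.g. (hypothetical paths) a publish path of
# '/mnt/projects/PRJ/sh010/publish/render/v001' with asset 'sh010'
# yields publish_dir '/mnt/projects/PRJ/sh010'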
for script in presets.get(entity.get('action')):
self.log.info(
'[{}] : running script for user {}'.format(
entity.get('action'), user["username"]))
self._run_script(script, [user["username"],
work_dir, publish_dir])
return True
def register(session, **kw):
"""
Register plugin. Called when used as a plugin.
"""
if not isinstance(session, ftrack_api.session.Session):
return
UserAssigmentEvent(session).register()

View file

@@ -343,8 +343,6 @@ def reset_frame_range_handles():
"""Set frame range to current asset"""
root = nuke.root()
fps = float(api.Session.get("AVALON_FPS", 25))
root["fps"].setValue(fps)
name = api.Session["AVALON_ASSET"]
asset = io.find_one({"name": name, "type": "asset"})
@@ -356,7 +354,7 @@ def reset_frame_range_handles():
data = asset["data"]
missing_cols = []
check_cols = ["fstart", "fend", "handle_start", "handle_end"]
check_cols = ["fps", "fstart", "fend", "handle_start", "handle_end"]
for col in check_cols:
if col not in data:
@@ -373,20 +371,29 @@ def reset_frame_range_handles():
handles = avalon.nuke.get_handles(asset)
handle_start, handle_end = pype.get_handle_irregular(asset)
log.info("__ handles: `{}`".format(handles))
log.info("__ handle_start: `{}`".format(handle_start))
log.info("__ handle_end: `{}`".format(handle_end))
fps = asset["data"]["fps"]
edit_in = int(asset["data"]["fstart"]) - handle_start
edit_out = int(asset["data"]["fend"]) + handle_end
root["fps"].setValue(fps)
root["first_frame"].setValue(edit_in)
root["last_frame"].setValue(edit_out)
log.info("__ handles: `{}`".format(handles))
log.info("__ handle_start: `{}`".format(handle_start))
log.info("__ handle_end: `{}`".format(handle_end))
log.info("__ edit_in: `{}`".format(edit_in))
log.info("__ edit_out: `{}`".format(edit_out))
log.info("__ fps: `{}`".format(fps))
# setting active viewers
nuke.frame(int(asset["data"]["fstart"]))
vv = nuke.activeViewer().node()
try:
vv = nuke.activeViewer().node()
except AttributeError:
log.error("No active viewer. Select any node and hit num `1`")
return
range = '{0}-{1}'.format(
int(asset["data"]["fstart"]),

View file

@@ -18,6 +18,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
ftrack_log = logging.getLogger('ftrack_api')
ftrack_log.setLevel(logging.WARNING)
ftrack_log = logging.getLogger('ftrack_api_old')
ftrack_log.setLevel(logging.WARNING)
# Collect session
session = ftrack_api.Session()
context.data["ftrackSession"] = session

View file

@@ -49,14 +49,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
if comp.get('thumbnail'):
if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
location = self.get_ftrack_location(
'ftrack.server', ft_session
)
component_data = {
"name": "thumbnail" # Default component name is "main".
}
elif comp.get('preview'):
comp['thumbnail'] = True
elif comp.get('preview') or ("preview" in comp.get('tags', [])):
'''
Ftrack bug requirement:
- Start frame must be 0
@@ -120,7 +121,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList.append(component_item)
# Create copy with ftrack.unmanaged location if thumb or prev
if comp.get('thumbnail') or comp.get('preview'):
if comp.get('thumbnail') or comp.get('preview') \
or ("preview" in comp.get('tags', [])) \
or ("thumbnail" in comp.get('tags', [])):
unmanaged_loc = self.get_ftrack_location(
'ftrack.unmanaged', ft_session
)
@@ -148,7 +151,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList.append(component_item_src)
self.log.debug('componentsList: {}'.format(str(componentList)))
instance.data["ftrackComponentsList"] = componentList

View file

@@ -0,0 +1,86 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
"""
label = "Extract Quicktime"
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
hosts = ["shell"]
def process(self, instance):
# fps = instance.data.get("fps")
# start = instance.data.get("startFrame")
# stagingdir = os.path.normpath(instance.data.get("stagingDir"))
#
# collected_frames = os.listdir(stagingdir)
# collections, remainder = clique.assemble(collected_frames)
#
# full_input_path = os.path.join(
# stagingdir, collections[0].format('{head}{padding}{tail}')
# )
# self.log.info("input {}".format(full_input_path))
#
# filename = collections[0].format('{head}')
# if not filename.endswith('.'):
# filename += "."
# movFile = filename + "mov"
# full_output_path = os.path.join(stagingdir, movFile)
#
# self.log.info("output {}".format(full_output_path))
#
# config_data = instance.context.data['output_repre_config']
#
# proj_name = os.environ.get('AVALON_PROJECT', '__default__')
# profile = config_data.get(proj_name, config_data['__default__'])
#
# input_args = []
# # overrides output file
# input_args.append("-y")
# # preset's input data
# input_args.extend(profile.get('input', []))
# # necessary input data
# input_args.append("-start_number {}".format(start))
# input_args.append("-i {}".format(full_input_path))
# input_args.append("-framerate {}".format(fps))
#
# output_args = []
# # preset's output data
# output_args.extend(profile.get('output', []))
# # output filename
# output_args.append(full_output_path)
# mov_args = [
# "ffmpeg",
# " ".join(input_args),
# " ".join(output_args)
# ]
# subprocess_mov = " ".join(mov_args)
# sub_proc = subprocess.Popen(subprocess_mov)
# sub_proc.wait()
#
# if not os.path.isfile(full_output_path):
# raise("Quicktime wasn't created succesfully")
#
# if "representations" not in instance.data:
# instance.data["representations"] = []
#
# representation = {
# 'name': 'mov',
# 'ext': 'mov',
# 'files': movFile,
# "stagingDir": stagingdir,
# "preview": True
# }
# instance.data["representations"].append(representation)

View file

@@ -1,92 +0,0 @@
# import os
# import pyblish.api
# import subprocess
# from pype.vendor import clique
# from pypeapp import config
#
#
# class ExtractReview(pyblish.api.InstancePlugin):
# """Resolve any dependency issies
#
# This plug-in resolves any paths which, if not updated might break
# the published file.
#
# The order of families is important, when working with lookdev you want to
# first publish the texture, update the texture paths in the nodes and then
# publish the shading network. Same goes for file dependent assets.
# """
#
# label = "Extract Review"
# order = pyblish.api.ExtractorOrder
# # families = ["imagesequence", "render", "write", "source"]
# # hosts = ["shell"]
#
# def process(self, instance):
# # adding plugin attributes from presets
# publish_presets = config.get_presets()["plugins"]["global"]["publish"]
# plugin_attrs = publish_presets[self.__class__.__name__]
#
#
# fps = instance.data.get("fps")
# start = instance.data.get("startFrame")
# stagingdir = os.path.normpath(instance.data.get("stagingDir"))
#
# collected_frames = os.listdir(stagingdir)
# collections, remainder = clique.assemble(collected_frames)
#
# full_input_path = os.path.join(
# stagingdir, collections[0].format('{head}{padding}{tail}')
# )
# self.log.info("input {}".format(full_input_path))
#
# filename = collections[0].format('{head}')
# if not filename.endswith('.'):
# filename += "."
# movFile = filename + "mov"
# full_output_path = os.path.join(stagingdir, movFile)
#
# self.log.info("output {}".format(full_output_path))
#
# config_data = instance.context.data['output_repre_config']
#
# proj_name = os.environ.get('AVALON_PROJECT', '__default__')
# profile = config_data.get(proj_name, config_data['__default__'])
#
# input_args = []
# # overrides output file
# input_args.append("-y")
# # preset's input data
# input_args.extend(profile.get('input', []))
# # necessary input data
# input_args.append("-start_number {}".format(start))
# input_args.append("-i {}".format(full_input_path))
# input_args.append("-framerate {}".format(fps))
#
# output_args = []
# # preset's output data
# output_args.extend(profile.get('output', []))
# # output filename
# output_args.append(full_output_path)
# mov_args = [
# "ffmpeg",
# " ".join(input_args),
# " ".join(output_args)
# ]
# subprocess_mov = " ".join(mov_args)
# sub_proc = subprocess.Popen(subprocess_mov)
# sub_proc.wait()
#
# if not os.path.isfile(full_output_path):
# raise("Quicktime wasn't created succesfully")
#
# if "representations" not in instance.data:
# instance.data["representations"] = []
#
# representation = {
# 'name': 'mov',
# 'ext': 'mov',
# 'files': movFile,
# "stagingDir": stagingdir,
# "preview": True
# }
# instance.data["representations"].append(representation)

View file

@@ -6,6 +6,7 @@ from pprint import pformat
import pyblish.api
from avalon import api
import pype.api as pype
def collect(root,
@@ -64,7 +65,7 @@
return collections
class CollectFileSequences(pyblish.api.ContextPlugin):
class CollectRenderedFrames(pyblish.api.ContextPlugin):
"""Gather file sequences from working directory
When "FILESEQUENCE" environment variable is set these paths (folders or
@@ -87,7 +88,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder
targets = ["filesequence"]
label = "File Sequences"
label = "RenderedFrames"
def process(self, context):
if os.environ.get("PYPE_PUBLISH_PATHS"):
@@ -128,6 +129,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
self.log.info("setting session using metadata")
api.Session.update(session)
os.environ.update(session)
else:
# Search in directory
data = dict()
@@ -161,6 +163,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
assert isinstance(families, (list, tuple)), "Must be iterable"
assert families, "Must have at least a single family"
families.append("ftrack")
families.append("review")
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
@@ -205,7 +208,8 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
'files': list(collection),
"stagingDir": root,
"anatomy_template": "render",
"frameRate": fps
"frameRate": fps,
"tags": ['review']
}
instance.data["representations"].append(representation)

View file

@@ -2,6 +2,7 @@ import os
import subprocess
import pype.api
import json
import pyblish
class ExtractBurnin(pype.api.Extractor):
@@ -14,7 +15,8 @@ class ExtractBurnin(pype.api.Extractor):
"""
label = "Quicktime with burnins"
families = ["burnin"]
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
optional = True
def process(self, instance):
@@ -22,42 +24,68 @@
raise RuntimeError("Burnin needs already created mov to work on.")
# TODO: expand burnin data list to include all useful keys
burnin_data = {
version = ''
if instance.context.data.get('version'):
version = "v" + str(instance.context.data['version'])
prep_data = {
"username": instance.context.data['user'],
"asset": os.environ['AVALON_ASSET'],
"task": os.environ['AVALON_TASK'],
"start_frame": int(instance.data['startFrame']),
"version": "v" + str(instance.context.data['version'])
"version": version
}
self.log.debug("__ prep_data: {}".format(prep_data))
for i, repre in enumerate(instance.data["representations"]):
self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
for repre in instance.data["representations"]:
if (not repre.get("burnin", False) or
"burnin" not in repre.get("tags", [])):
if "burnin" not in repre.get("tags", []):
continue
stagingdir = self.staging_dir(instance)
stagingdir = repre["stagingDir"]
filename = "{0}".format(repre["files"])
movieFileBurnin = filename + "Burn" + ".mov"
name = "_burnin"
movieFileBurnin = filename.replace(".mov", "") + name + ".mov"
full_movie_path = os.path.join(stagingdir, repre["files"])
full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"])
full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin)
self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"output": full_burnin_path.replace("\\", "/"),
"burnin_data": burnin_data
}
"burnin_data": prep_data
}
self.log.debug("__ burnin_data2: {}".format(burnin_data))
json_data = json.dumps(burnin_data)
scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
scriptpath = os.path.normpath(os.path.join(os.environ['PYPE_MODULE_ROOT'],
"pype",
"scripts",
"otio_burnin.py")
"otio_burnin.py"))
p = subprocess.Popen(
['python', scriptpath, json_data]
)
p.wait()
self.log.debug("__ scriptpath: {}".format(scriptpath))
self.log.debug("__ EXE: {}".format(os.getenv("PYPE_PYTHON_EXE")))
repre['files'] = movieFileBurnin
try:
p = subprocess.Popen(
[os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data]
)
p.wait()
if not os.path.isfile(full_burnin_path):
raise RuntimeError("File not existing: {}".format(full_burnin_path))
except Exception as e:
raise RuntimeError("Burnin script didn't work: `{}`".format(e))
if os.path.exists(full_burnin_path):
repre_update = {
"files": movieFileBurnin,
"name": repre["name"]
}
instance.data["representations"][i].update(repre_update)
# removing the source mov file
os.remove(full_movie_path)
self.log.debug("Removed: `{}`".format(full_movie_path))

View file

@@ -1,86 +0,0 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
"""
label = "Extract Quicktime"
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
hosts = ["shell"]
def process(self, instance):
fps = instance.data.get("fps")
start = instance.data.get("startFrame")
stagingdir = os.path.normpath(instance.data.get("stagingDir"))
collected_frames = os.listdir(stagingdir)
collections, remainder = clique.assemble(collected_frames)
full_input_path = os.path.join(
stagingdir, collections[0].format('{head}{padding}{tail}')
)
self.log.info("input {}".format(full_input_path))
filename = collections[0].format('{head}')
if not filename.endswith('.'):
filename += "."
movFile = filename + "mov"
full_output_path = os.path.join(stagingdir, movFile)
self.log.info("output {}".format(full_output_path))
config_data = instance.context.data['output_repre_config']
proj_name = os.environ.get('AVALON_PROJECT', '__default__')
profile = config_data.get(proj_name, config_data['__default__'])
input_args = []
# overrides output file
input_args.append("-y")
# preset's input data
input_args.extend(profile.get('input', []))
# necessary input data
input_args.append("-start_number {}".format(start))
input_args.append("-i {}".format(full_input_path))
input_args.append("-framerate {}".format(fps))
output_args = []
# preset's output data
output_args.extend(profile.get('output', []))
# output filename
output_args.append(full_output_path)
mov_args = [
"ffmpeg",
" ".join(input_args),
" ".join(output_args)
]
subprocess_mov = " ".join(mov_args)
sub_proc = subprocess.Popen(subprocess_mov)
sub_proc.wait()
if not os.path.isfile(full_output_path):
raise("Quicktime wasn't created succesfully")
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'mov',
'ext': 'mov',
'files': movFile,
"stagingDir": stagingdir,
"preview": True
}
instance.data["representations"].append(representation)

View file

@@ -0,0 +1,174 @@
import os
import pyblish.api
import subprocess
from pype.vendor import clique
from pypeapp import config
class ExtractReview(pyblish.api.InstancePlugin):
"""Extracting Review mov file for Ftrack
Compulsory attribute of representation is tags list with "review",
otherwise the representation is ignored.
All new represetnations are created and encoded by ffmpeg following
presets found in `pype-config/presets/plugins/global/publish.json:ExtractReview:outputs`. To change the file extension
filter values use preset's attributes `ext_filter`
"""
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
def process(self, instance):
# adding plugin attributes from presets
publish_presets = config.get_presets()["plugins"]["global"]["publish"]
plugin_attrs = publish_presets[self.__class__.__name__]
output_profiles = plugin_attrs.get("outputs", {})
inst_data = instance.data
fps = inst_data.get("fps")
start_frame = inst_data.get("startFrame")
self.log.debug("Families In: `{}`".format(instance.data["families"]))
# get representation and loop them
representations = instance.data["representations"]
# filter out mov and img sequences
representations_new = representations[:]
for repre in representations:
if repre['ext'] in plugin_attrs["ext_filter"]:
tags = repre.get("tags", [])
self.log.info("Try repre: {}".format(repre))
if "review" in tags:
staging_dir = repre["stagingDir"]
for name, profile in output_profiles.items():
self.log.debug("Profile name: {}".format(name))
ext = profile.get("ext", None)
if not ext:
ext = "mov"
self.log.warning(
"`ext` attribute not in output profile. Setting to default ext: `mov`")
self.log.debug("instance.families: {}".format(instance.data['families']))
self.log.debug("profile.families: {}".format(profile['families']))
if any(item in instance.data['families'] for item in profile['families']):
if isinstance(repre["files"], list):
collections, remainder = clique.assemble(
repre["files"])
full_input_path = os.path.join(
staging_dir, collections[0].format(
'{head}{padding}{tail}')
)
filename = collections[0].format('{head}')
if filename.endswith('.'):
filename = filename[:-1]
else:
full_input_path = os.path.join(
staging_dir, repre["files"])
filename = repre["files"].split(".")[0]
repr_file = filename + "_{0}.{1}".format(name, ext)
full_output_path = os.path.join(
staging_dir, repr_file)
self.log.info("input {}".format(full_input_path))
self.log.info("output {}".format(full_output_path))
repre_new = repre.copy()
new_tags = tags[:]
p_tags = profile.get('tags', [])
self.log.info("p_tags: `{}`".format(p_tags))
# add families
[instance.data["families"].append(t)
for t in p_tags
if t not in instance.data["families"]]
# add to
[new_tags.append(t) for t in p_tags
if t not in new_tags]
self.log.info("new_tags: `{}`".format(new_tags))
input_args = []
# overrides output file
input_args.append("-y")
# preset's input data
input_args.extend(profile.get('input', []))
# necessary input data
# adds start arg only if image sequence
if "mov" not in repre_new['ext']:
input_args.append("-start_number {0} -framerate {1}".format(
start_frame, fps))
input_args.append("-i {}".format(full_input_path))
output_args = []
# preset's output data
output_args.extend(profile.get('output', []))
# letter_box
# TODO: add to documentation
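# 'letter_box' is the target aspect ratio: each drawbox call below paints
# a black bar of height (ih - iw/ratio)/2, one at the top and one at the
# bottom of the frame.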
lb = profile.get('letter_box', None)
if lb:
output_args.append(
"-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
# output filename
output_args.append(full_output_path)
mov_args = [
"ffmpeg",
" ".join(input_args),
" ".join(output_args)
]
subprcs_cmd = " ".join(mov_args)
# run subprocess
self.log.debug("{}".format(subprcs_cmd))
sub_proc = subprocess.Popen(subprcs_cmd)
sub_proc.wait()
if not os.path.isfile(full_output_path):
raise RuntimeError(
"Quicktime wasn't created successfully")
# create representation data
repre_new.update({
'name': name,
'ext': ext,
'files': repr_file,
"tags": new_tags,
"outputName": name
})
if repre_new.get('preview'):
repre_new.pop("preview")
if repre_new.get('thumbnail'):
repre_new.pop("thumbnail")
# adding representation
representations_new.append(repre_new)
# if "delete" in tags:
# if "mov" in full_input_path:
# os.remove(full_input_path)
# self.log.debug("Removed: `{}`".format(full_input_path))
else:
continue
else:
continue
self.log.debug(
"new representations: {}".format(representations_new))
instance.data["representations"] = representations_new
self.log.debug("Families Out: `{}`".format(instance.data["families"]))

View file

@@ -99,18 +99,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# \ /
# o __/
#
for result in context.data["results"]:
if not result["success"]:
self.log.debug(result)
exc_type, exc_value, exc_traceback = result["error_info"]
extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
self.log.debug(
"Error at line {}: \"{}\"".format(
extracted_traceback[1], result["error"]
)
)
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
# for result in context.data["results"]:
# if not result["success"]:
# self.log.debug(result)
# exc_type, exc_value, exc_traceback = result["error_info"]
# extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
# self.log.debug(
# "Error at line {}: \"{}\"".format(
# extracted_traceback[1], result["error"]
# )
# )
# assert all(result["success"] for result in context.data["results"]), (
# "Atomicity not held, aborting.")
# Assemble
#
@@ -225,17 +225,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents)
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"task": TASK,
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": int(version["name"]),
"hierarchy": hierarchy}
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
@@ -257,6 +246,17 @@
# | ||
# |_______|
#
# create template data for Anatomy
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"task": TASK,
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": int(version["name"]),
"hierarchy": hierarchy}
files = repre['files']
if repre.get('stagingDir'):
@@ -286,8 +286,9 @@
os.path.normpath(
anatomy_filled[template_name]["path"])
)
self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
@@ -342,6 +343,9 @@
template_data["representation"] = repre['ext']
if repre.get("outputName"):
template_data["output"] = repre['outputName']
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = os.path.normpath(

View file

@@ -276,7 +276,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"families": ["render"],
"source": source,
"user": context.data["user"],
"version": context.data["version"],
# Optional metadata (for debugging)
"metadata": {
"instance": data,

View file

@@ -1,63 +0,0 @@
import pype.maya.plugin
import os
from pypeapp import config
class AbcLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the pype.animation family"""
families = ["animation",
"pointcache"]
label = "Reference animation"
representations = ["abc"]
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "animation"
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName=groupName,
reference=True,
returnNewNodes=True)
nodes.pop(0)
roots = set()
for node in nodes:
try:
roots.add(cmds.ls(node, long=True)[0].split('|')[2])
except:
pass
cmds.parent(roots, world=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
cmds.parent(roots, groupName)
nodes.append(groupName)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)

View file

@@ -0,0 +1,105 @@
from avalon import api
import pype.maya.plugin
import os
from pypeapp import config
import pymel.core as pm
reload(config)
class GpuCacheLoader(api.Loader):
"""Load model Alembic as gpuCache"""
families = ["model"]
representations = ["abc"]
label = "Import Gpu Cache"
order = -5
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
import maya.cmds as cmds
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
prefix="_" if asset[0].isdigit() else "",
suffix="_",
)
cmds.loadPlugin("gpuCache", quiet=True)
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create transform with shape
transform_name = label + "_GPU"
transform = cmds.createNode("transform", name=transform_name,
parent=root)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(transform_name))
# Set the cache filepath
cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
# Lock parenting of the transform and cache
cmds.lockNode([transform, cache], lock=True)
nodes = [root, transform, cache]
self[:] = nodes
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def update(self, container, representation):
import maya.cmds as cmds
path = api.get_representation_path(representation)
# Update the cache
members = cmds.sets(container['objectName'], query=True)
caches = cmds.ls(members, type="gpuCache", long=True)
assert len(caches) == 1, "This is a bug"
for cache in caches:
cmds.setAttr(cache + ".cacheFileName", path, type="string")
cmds.setAttr(container["objectName"] + ".representation",
str(representation["_id"]),
type="string")
def switch(self, container, representation):
self.update(container, representation)
def remove(self, container):
import maya.cmds as cmds
members = cmds.sets(container['objectName'], query=True)
cmds.lockNode(members, lock=False)
cmds.delete([container['objectName']] + members)
# Clean up the namespace
try:
cmds.namespace(removeNamespace=container['namespace'],
deleteNamespaceContent=True)
except RuntimeError:
pass

View file

@@ -1,219 +0,0 @@
from avalon import api
import pype.maya.plugin
import os
from pypeapp import config
reload(config)
class ModelLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
families = ["model"]
representations = ["ma"]
tool_names = ["loader"]
label = "Reference Model"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
from avalon import maya
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
groupReference=True,
groupName=groupName,
reference=True,
returnNewNodes=True)
nodes.pop(0)
roots = set()
for node in nodes:
try:
roots.add(cmds.ls(node, long=True)[0].split('|')[2])
except:
pass
cmds.parent(roots, world=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
cmds.parent(roots, groupName)
nodes.append(groupName)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)
class GpuCacheLoader(api.Loader):
"""Load model Alembic as gpuCache"""
families = ["model"]
representations = ["abc"]
label = "Import Gpu Cache"
order = -5
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
import maya.cmds as cmds
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
prefix="_" if asset[0].isdigit() else "",
suffix="_",
)
cmds.loadPlugin("gpuCache", quiet=True)
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create transform with shape
transform_name = label + "_GPU"
transform = cmds.createNode("transform", name=transform_name,
parent=root)
cache = cmds.createNode("gpuCache",
parent=transform,
name="{0}Shape".format(transform_name))
# Set the cache filepath
cmds.setAttr(cache + '.cacheFileName', self.fname, type="string")
cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root
# Lock parenting of the transform and cache
cmds.lockNode([transform, cache], lock=True)
nodes = [root, transform, cache]
self[:] = nodes
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__)
def update(self, container, representation):
import maya.cmds as cmds
path = api.get_representation_path(representation)
# Update the cache
members = cmds.sets(container['objectName'], query=True)
caches = cmds.ls(members, type="gpuCache", long=True)
assert len(caches) == 1, "This is a bug"
for cache in caches:
cmds.setAttr(cache + ".cacheFileName", path, type="string")
cmds.setAttr(container["objectName"] + ".representation",
str(representation["_id"]),
type="string")
def switch(self, container, representation):
self.update(container, representation)
def remove(self, container):
import maya.cmds as cmds
members = cmds.sets(container['objectName'], query=True)
cmds.lockNode(members, lock=False)
cmds.delete([container['objectName']] + members)
# Clean up the namespace
try:
cmds.namespace(removeNamespace=container['namespace'],
deleteNamespaceContent=True)
except RuntimeError:
pass
class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the studio.animation family"""
families = ["model"]
representations = ["abc"]
tool_names = ["loader"]
label = "Reference Model"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName="{}:{}".format(namespace, name),
reference=True,
returnNewNodes=True)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
nodes.pop(0)
roots = set()
for node in nodes:
try:
roots.add(cmds.ls(node, long=True)[0].split('|')[2])
except:
pass
cmds.parent(roots, world=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
cmds.parent(roots, groupName)
nodes.append(groupName)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get('model')
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return roots
def switch(self, container, representation):
self.update(container, representation)
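
The unparent, makeIdentity, reparent sequence above resets the group's transform without moving the geometry: children keep their world position while parked at world level. A sketch of the same pattern, assuming a Maya session (hypothetical names):

import maya.cmds as cmds

grp = cmds.group(name="demo_grp", empty=True)
geo = cmds.polyCube(name="demo_geo")[0]
geo = cmds.parent(geo, grp)[0]
cmds.setAttr(grp + ".translateX", 5)

geo = cmds.parent(geo, world=True)[0]  # park the child at world level
cmds.makeIdentity(grp, apply=False, translate=True, rotate=True, scale=True)
cmds.parent(geo, grp)  # the child keeps its world position
print(cmds.getAttr(grp + ".translateX"))  # 0.0
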

View file

@ -0,0 +1,85 @@
from avalon import api
import pype.maya.plugin
import os
from pypeapp import config
import pymel.core as pm
reload(config)
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
families = ["model", "pointcache", "animation"]
representations = ["ma", "abc"]
tool_names = ["loader"]
label = "Reference"
order = -10
icon = "code-fork"
color = "orange"
def process_reference(self, context, name, namespace, data):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
        except KeyError:  # representation context may lack a family
family = "model"
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName="{}:{}".format(namespace, name),
reference=True,
returnNewNodes=True)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
groupNode = pm.PyNode(groupName)
roots = set()
            self.log.debug("loaded nodes: {}".format(nodes))
for node in nodes:
try:
roots.add(pm.PyNode(node).getAllParents()[-2])
                except Exception:  # nodes without parents are skipped
pass
for root in roots:
root.setParent(world=True)
groupNode.root().zeroTransformPivots()
for root in roots:
root.setParent(groupNode)
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
colors = presets['plugins']['maya']['load']['colors']
c = colors.get(family)
if c is not None:
groupNode.useOutlinerColor.set(1)
groupNode.outlinerColor.set(c[0], c[1], c[2])
self[:] = nodes
return nodes
def switch(self, container, representation):
self.update(container, representation)
# for backwards compatibility
class AbcLoader(ReferenceLoader):
families = ["pointcache", "animation"]
representations = ["abc"]
tool_names = []
# for backwards compatibility
class ModelLoader(ReferenceLoader):
families = ["model", "pointcache"]
representations = ["abc"]
tool_names = []

View file

@ -42,7 +42,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
data['representations'] = [{
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': file,
"stagingDir": folder,
}]
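
This hunk and the ones that follow drop the leading dot from representation 'ext' values across the Maya extractors. A hedged reading of the convention: the integrator joins name and extension with its own dot, so a dotted value would double the separator; illustratively:

ext = 'ma'  # was '.ma'
print('{}.{}'.format('sceneMain_v001', ext))  # -> sceneMain_v001.ma
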

View file

@ -82,7 +82,7 @@ class ExtractAnimation(pype.api.Extractor):
representation = {
'name': 'abc',
'ext': '.abc',
'ext': 'abc',
'files': filename,
"stagingDir": dirname,
}

View file

@ -42,7 +42,7 @@ class ExtractAssStandin(pype.api.Extractor):
representation = {
'name': 'ass',
'ext': '.ass',
'ext': 'ass',
'files': filename,
"stagingDir": staging_dir
}

View file

@ -68,7 +68,7 @@ class ExtractAssProxy(pype.api.Extractor):
representation = {
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': filename,
"stagingDir": stagingdir
}

View file

@ -75,7 +75,7 @@ class ExtractCameraAlembic(pype.api.Extractor):
representation = {
'name': 'abc',
'ext': '.abc',
'ext': 'abc',
'files': filename,
"stagingDir": dir_path,
}

View file

@ -173,7 +173,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
representation = {
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': filename,
"stagingDir": dir_path,
}

View file

@ -213,7 +213,7 @@ class ExtractFBX(pype.api.Extractor):
representation = {
'name': 'mov',
'ext': '.mov',
'ext': 'mov',
'files': filename,
"stagingDir": stagingDir,
}

View file

@ -56,7 +56,7 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):
representation = {
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': filename,
"stagingDir": dir_path
}

View file

@ -74,7 +74,7 @@ class ExtractModel(pype.api.Extractor):
representation = {
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': filename,
"stagingDir": stagingdir,
}

View file

@ -84,7 +84,7 @@ class ExtractAlembic(pype.api.Extractor):
representation = {
'name': 'abc',
'ext': '.abc',
'ext': 'abc',
'files': filename,
"stagingDir": dirname
}

View file

@ -99,7 +99,6 @@ class ExtractQuicktime(pype.api.Extractor):
playblast = capture_gui.lib.capture_scene(preset)
self.log.info("file list {}".format(playblast))
# self.log.info("Calculating HUD data overlay")
collected_frames = os.listdir(stagingdir)
collections, remainder = clique.assemble(collected_frames)
@ -107,61 +106,19 @@ class ExtractQuicktime(pype.api.Extractor):
stagingdir, collections[0].format('{head}{padding}{tail}'))
self.log.info("input {}".format(input_path))
movieFile = filename + ".mov"
movieFileBurnin = filename + "Burn" + ".mov"
full_movie_path = os.path.join(stagingdir, movieFile)
full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
self.log.info("output {}".format(full_movie_path))
with avalon.maya.suspended_refresh():
try:
(
ffmpeg
.input(input_path, framerate=fps, start_number=int(start))
.output(full_movie_path)
.run(overwrite_output=True,
capture_stdout=True,
capture_stderr=True)
)
except ffmpeg.Error as e:
ffmpeg_error = 'ffmpeg error: {}'.format(e.stderr)
self.log.error(ffmpeg_error)
raise RuntimeError(ffmpeg_error)
version = instance.context.data['version']
burnin_data = {
"input": full_movie_path.replace("\\", "/"),
"output": full_burnin_path.replace("\\", "/"),
"burnin_data": {
"username": instance.context.data['user'],
"asset": os.environ['AVALON_ASSET'],
"task": os.environ['AVALON_TASK'],
"start_frame": int(start),
"version": "v" + str(version)
}
}
json_data = json.dumps(burnin_data)
scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'], "pype", "scripts", "otio_burnin.py")
p = subprocess.Popen(
['python', scriptpath, json_data]
)
p.wait()
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'mov',
'ext': 'mov',
'files': movieFileBurnin,
'files': collected_frames,
"stagingDir": stagingdir,
'startFrame': start,
'endFrame': end,
'frameRate': fps,
'preview': True
'preview': True,
'tags': ['review']
}
instance.data["representations"].append(representation)
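
The input sequence path above is assembled with clique from the collected frame files; a standalone illustration with hypothetical frame names:

import clique

frames = ['shot.1001.png', 'shot.1002.png', 'shot.1003.png']
collections, remainder = clique.assemble(frames)
# The head/padding/tail format yields the printf-style pattern ffmpeg expects.
print(collections[0].format('{head}{padding}{tail}'))  # shot.%04d.png
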

View file

@ -30,7 +30,7 @@ class ExtractRenderSetup(pype.api.Extractor):
representation = {
'name': 'json',
'ext': '.json',
'ext': 'json',
'files': json_filename,
"stagingDir": parent_dir,
}

View file

@ -39,7 +39,7 @@ class ExtractRig(pype.api.Extractor):
representation = {
'name': 'ma',
'ext': '.ma',
'ext': 'ma',
'files': filename,
"stagingDir": dir_path
}

View file

@ -137,7 +137,7 @@ class ExtractThumbnail(pype.api.Extractor):
representation = {
'name': 'thumbnail',
'ext': '.jpg',
'ext': 'jpg',
'files': thumbnail,
"stagingDir": stagingDir,
"thumbnail": True

View file

@ -59,7 +59,7 @@ class ExtractVRayProxy(pype.api.Extractor):
representation = {
'name': 'vrmesh',
'ext': '.vrmesh',
'ext': 'vrmesh',
'files': file_name,
"stagingDir": staging_dir,
}

View file

@ -0,0 +1,46 @@
import pyblish.api
@pyblish.api.log
class CollectRenderTarget(pyblish.api.InstancePlugin):
"""Collect families for all instances"""
order = pyblish.api.CollectorOrder + 0.2
label = "Collect Render Target"
hosts = ["nuke", "nukeassist"]
families = ['write']
def process(self, instance):
node = instance[0]
self.log.info('processing {}'.format(node))
families = []
if instance.data.get('families'):
families += instance.data['families']
# set for ftrack to accept
# instance.data["families"] = ["ftrack"]
if node["render"].value():
# dealing with local/farm rendering
if node["render_farm"].value():
families.append("render.farm")
else:
families.append("render.local")
else:
families.append("render.frames")
# to ignore staging dir op in integrate
instance.data['transfer'] = False
families.append('ftrack')
instance.data["families"] = families
# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))
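
Because sort_by_family keys on the families list (falling back to the single family), sorted() groups instances of the same family together while Python's stable sort keeps their original relative order; a quick sketch with hypothetical data:

ordered = sorted(
    [{'families': ['render.local']},
     {'families': ['ftrack']},
     {'families': ['render.local']}],
    key=lambda i: i['families'])
print([i['families'] for i in ordered])
# [['ftrack'], ['render.local'], ['render.local']]
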

View file

@ -27,8 +27,8 @@ class ExtractScript(pype.api.Extractor):
shutil.copy(current_script, path)
if "representations" not in instance.data:
instance.data["representations"] = []
instance.data["representations"] = list()
representation = {
'name': 'nk',
'ext': '.nk',

View file

@ -0,0 +1,14 @@
import pyblish.api
import nuke
class CollectActiveViewer(pyblish.api.ContextPlugin):
"""Collect any active viewer from nodes
"""
order = pyblish.api.CollectorOrder + 0.3
label = "Collect Active Viewer"
hosts = ["nuke"]
def process(self, context):
context.data["ViewerProcess"] = nuke.ViewerProcess.node()

View file

@ -64,11 +64,11 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
"name": node.name(),
"subset": subset,
"family": avalon_knob_data["family"],
"families": [family],
"families": [avalon_knob_data["family"], family],
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish').value(),
"step": 1,
"fps": int(nuke.root()['fps'].value())
"fps": nuke.root()['fps'].value()
})

View file

@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
families = ["render.local", "render", "render.farm"]
families = ["render", "render.local", "render.farm"]
def process(self, instance):
@ -66,19 +66,20 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
instance.data['families'].append('ftrack')
if "representations" not in instance.data:
instance.data["representations"] = list()
try:
collected_frames = os.listdir(output_dir)
representation = {
'name': ext,
'ext': ext,
'files': collected_frames,
"stagingDir": output_dir,
"anatomy_template": "render"
}
instance.data["representations"].append(representation)
try:
collected_frames = os.listdir(output_dir)
representation['files'] = collected_frames
instance.data["representations"].append(representation)
except Exception:
instance.data["representations"].append(representation)
self.log.debug("couldn't collect frames: {}".format(label))
if 'render.local' in instance.data['families']:
@ -96,5 +97,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"colorspace": node["colorspace"].value(),
})
self.log.debug("instance.data: {}".format(instance.data))

View file

@ -21,7 +21,6 @@ class NukeRenderLocal(pype.api.Extractor):
def process(self, instance):
node = instance[0]
context = instance.context
self.log.debug("instance collected: {}".format(instance.data))
@ -29,12 +28,6 @@ class NukeRenderLocal(pype.api.Extractor):
last_frame = instance.data.get("endFrame", None)
node_subset_name = instance.data.get("name", None)
# swap path to stageDir
temp_dir = self.staging_dir(instance).replace("\\", "/")
output_dir = instance.data.get("outputDir")
path = node['file'].value()
node['file'].setValue(path.replace(output_dir, temp_dir))
self.log.info("Starting render")
self.log.info("Start frame: {}".format(first_frame))
self.log.info("End frame: {}".format(last_frame))
@ -46,27 +39,26 @@ class NukeRenderLocal(pype.api.Extractor):
int(last_frame)
)
# swap path back to publish path
path = node['file'].value()
node['file'].setValue(path.replace(temp_dir, output_dir))
out_dir = os.path.dirname(path)
ext = node["file_type"].value()
if "representations" not in instance.data:
instance.data["representations"] = []
collected_frames = os.listdir(temp_dir)
collected_frames = os.listdir(out_dir)
repre = {
'name': ext,
'ext': ext,
'files': collected_frames,
"stagingDir": temp_dir,
"stagingDir": out_dir,
"anatomy_template": "render"
}
instance.data["representations"].append(repre)
self.log.info("Extracted instance '{0}' to: {1}".format(
instance.name,
temp_dir
out_dir
))
instance.data['family'] = 'render'

View file

@ -2,10 +2,9 @@ import os
import nuke
import pyblish.api
import pype
from pype.vendor import ffmpeg
class ExtractDataForReview(pype.api.Extractor):
class ExtractReviewData(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
@ -13,8 +12,7 @@ class ExtractDataForReview(pype.api.Extractor):
"""
order = pyblish.api.ExtractorOrder + 0.01
label = "Extract Review"
optional = True
label = "Extract Review Data"
families = ["review"]
hosts = ["nuke"]
@ -35,63 +33,15 @@ class ExtractDataForReview(pype.api.Extractor):
if "still" not in instance.data["families"]:
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.log.debug("instance.data: {}".format(instance.data))
self.render_review_representation(instance,
representation="jpeg")
else:
self.log.debug("instance: {}".format(instance))
self.render_review_representation(instance, representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
def transcode_mov(self, instance):
collection = instance.data["collection"]
stagingDir = instance.data["stagingDir"].replace("\\", "/")
file_name = collection.format("{head}mov")
review_mov = os.path.join(stagingDir, file_name).replace("\\", "/")
self.log.info("transcoding review mov: {0}".format(review_mov))
if instance.data.get("baked_colorspace_movie"):
input_movie = instance.data["baked_colorspace_movie"]
out, err = (
ffmpeg
.input(input_movie)
.output(
review_mov,
pix_fmt='yuv420p',
crf=18,
timecode="00:00:00:01"
)
.overwrite_output()
.run()
)
self.log.debug("Removing `{0}`...".format(
instance.data["baked_colorspace_movie"]))
os.remove(instance.data["baked_colorspace_movie"])
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'review',
'ext': 'mov',
'files': file_name,
"stagingDir": stagingDir,
"anatomy_template": "render",
"thumbnail": False,
"preview": True,
'startFrameReview': instance.data['startFrame'],
'endFrameReview': instance.data['endFrame'],
'frameRate': instance.context.data["framerate"]
}
instance.data["representations"].append(representation)
def render_review_representation(self,
instance,
representation="mov"):
@ -132,15 +82,20 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(node)
reformat_node = nuke.createNode("Reformat")
reformat_node["format"].setValue("HD_1080")
reformat_node["resize"].setValue("fit")
reformat_node["filter"].setValue("Lanczos6")
reformat_node["black_outside"].setValue(True)
ref_node = self.nodes.get("Reformat", None)
if ref_node:
            for k, v in ref_node.items():
                self.log.debug("k, v: {0}:{1}".format(k, v))
                if isinstance(v, unicode):
                    v = str(v)
                reformat_node[k].setValue(v)
reformat_node.setInput(0, previous_node)
previous_node = reformat_node
temporary_nodes.append(reformat_node)
viewer_process_node = nuke.ViewerProcess.node()
viewer_process_node = instance.context.data.get("ViewerProcess")
dag_node = None
if viewer_process_node:
dag_node = nuke.createNode(viewer_process_node.Class())
@ -162,6 +117,7 @@ class ExtractDataForReview(pype.api.Extractor):
        if representation == "mov":
file = fhead + "baked.mov"
name = "baked"
path = os.path.join(stagingDir, file).replace("\\", "/")
self.log.debug("Path: {}".format(path))
instance.data["baked_colorspace_movie"] = path
@ -170,11 +126,11 @@ class ExtractDataForReview(pype.api.Extractor):
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
thumbnail = False
preview = True
tags = ["review", "delete"]
        elif representation == "jpeg":
file = fhead + "jpeg"
name = "thumbnail"
path = os.path.join(stagingDir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
@ -182,31 +138,29 @@ class ExtractDataForReview(pype.api.Extractor):
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
thumbnail = True
preview = False
tags = ["thumbnail"]
        # retime to the middle frame for the baked render
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
# add into files for integration as representation
if "representations" not in instance.data:
instance.data["representations"] = []
repre = {
'name': representation,
'ext': representation,
'files': file,
"stagingDir": stagingDir,
"anatomy_template": "render",
"thumbnail": thumbnail,
"preview": preview
}
instance.data["representations"].append(repre)
repre = {
'name': name,
'ext': representation,
'files': file,
"stagingDir": stagingDir,
"startFrame": first_frame,
"endFrame": last_frame,
"anatomy_template": "render",
"tags": tags
}
instance.data["representations"].append(repre)
# Render frames
nuke.execute(write_node.name(), int(first_frame), int(last_frame))
self.log.debug("representations: {}".format(instance.data["representations"]))
# Clean up
for node in temporary_nodes:
nuke.delete(node)
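
A possible hardening (sketch only): wrapping the render in try/finally guarantees the temporary nodes are deleted even when nuke.execute raises:

import nuke

temporary_nodes = []  # nodes created for the bake, as above
try:
    pass  # build the node graph and call nuke.execute(...)
finally:
    for node in temporary_nodes:
        nuke.delete(node)
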

View file

@ -0,0 +1,18 @@
import pyblish.api
import nuke
class ValidateActiveViewer(pyblish.api.ContextPlugin):
"""Validate presentse of the active viewer from nodes
"""
order = pyblish.api.ValidatorOrder
label = "Validate Active Viewer"
hosts = ["nuke"]
def process(self, context):
viewer_process_node = context.data.get("ViewerProcess")
assert viewer_process_node, (
"Missing active viewer process! Please click on output write node and push key number 1-9"
)

View file

@ -11,9 +11,12 @@ class RepairCollectionAction(pyblish.api.Action):
icon = "wrench"
def process(self, context, plugin):
self.log.info(context[0])
files_remove = [os.path.join(context[0].data["outputDir"], f)
for f in context[0].data["files"]]
for r in context[0].data.get("representations", [])
for f in r.get("files", [])
]
self.log.info(files_remove)
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
@ -38,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
if not repre.get('files'):
msg = ("no frames were collected, "
"you need to render them")
self.log.error(msg)
self.log.warning(msg)
raise ValidationException(msg)
collections, remainder = clique.assemble(repre["files"])

View file

@ -24,7 +24,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
# These attributes will be checked
attributes = [
"fps", "fstart", "fend",
"resolution_width", "resolution_height", "pixel_aspect", "handle_start", "handle_end"
"resolution_width", "resolution_height", "handle_start", "handle_end"
]
# Value of these attributes can be found on parents

View file

@ -41,7 +41,7 @@ class CollectClipHandles(api.ContextPlugin):
})
for instance in filtered_instances:
if not instance.data.get("main") or not instance.data.get("handleTag"):
if not instance.data.get("main") and not instance.data.get("handleTag"):
self.log.debug("Synchronize handles on: `{}`".format(
instance.data["name"]))
name = instance.data["asset"]

View file

@ -5,7 +5,7 @@ class CollectClipTagTypes(api.InstancePlugin):
"""Collect Types from Tags of selected track items."""
order = api.CollectorOrder + 0.012
label = "Collect Plate Type from Tag"
label = "Collect main flag"
hosts = ["nukestudio"]
families = ['clip']
@ -25,7 +25,8 @@ class CollectClipTagTypes(api.InstancePlugin):
t_subset.capitalize())
if "plateMain" in subset_name:
instance.data["main"] = True
if not instance.data.get("main"):
instance.data["main"] = True
self.log.info("`plateMain` found in instance.name: `{}`".format(
instance.data["name"]))
return

View file

@ -28,7 +28,7 @@ def get_version_from_path(file):
v: version number in string ('001')
"""
pattern = re.compile(r"_v([0-9]*)")
pattern = re.compile(r"[\._]v([0-9]*)")
try:
v = pattern.findall(file)[0]
return v
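
The broadened pattern accepts either '_v' or '.v' before the digits, so dot-delimited names match as well; a quick standalone check with hypothetical file names:

import re

pattern = re.compile(r"[\._]v([0-9]*)")
for name in ("plate_v012.ma", "shot.v001.nk", "comp_final.ma"):
    found = pattern.findall(name)
    print(name, "->", found[0] if found else None)
# plate_v012.ma -> 012
# shot.v001.nk -> 001
# comp_final.ma -> None
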

View file

@ -1 +1,9 @@
<?xml version="1.0" ?><svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g><g><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="0.0000038" x2="512" y1="256" y2="256"><stop offset="0" style="stop-color:#00AEEE"/><stop offset="1" style="stop-color:#0095DA"/></linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666679" x2="469.3333435" y1="256.0005188" y2="256.0005188"><stop offset="0" style="stop-color:#0095DA"/><stop offset="1" style="stop-color:#00AEEE"/></linearGradient><path d="M256,469.3338623c-117.6314697,0-213.3333282-95.7023926-213.3333282-213.3333435 c0-117.6314545,95.7018585-213.333313,213.3333282-213.333313c117.636261,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.636261,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/></g><path d="M360.2250671,330.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C315.2849121,166.9605255,292.3237305,144,264,144 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842865,51.0314636v46.6655884h-91.2256165 v37.9413452h-21.5375977V384h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256165v27.9413452h-21.5376129V384h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H328.687561V384h53.0761719v-53.0773315H360.2250671z" opacity="0.3"/><path d="M352.2250671,322.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C307.2849121,158.9605255,284.3237305,136,256,136 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842712,51.0314636v46.6655884h-91.2256012 v37.9413452h-21.5375977V376h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256012v27.9413452h-21.5375977V376h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H320.687561V376h53.0761719v-53.0773315H352.2250671z" fill="#FFFFFF"/></g></svg>
<?xml version="1.0" ?>
<svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g transform="scale(1.5)">
<g transform="translate(-78, -84)">
<path d="M360.2250671,330.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C315.2849121,166.9605255,292.3237305,144,264,144 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842865,51.0314636v46.6655884h-91.2256165 v37.9413452h-21.5375977V384h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256165v27.9413452h-21.5376129V384h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H328.687561V384h53.0761719v-53.0773315H360.2250671z" opacity="0.3"/>
<path d="M352.2250671,322.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C307.2849121,158.9605255,284.3237305,136,256,136 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842712,51.0314636v46.6655884h-91.2256012 v37.9413452h-21.5375977V376h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256012v27.9413452h-21.5375977V376h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H320.687561V376h53.0761719v-53.0773315H352.2250671z" fill="#FFFFFF"/>
</g>
</g>
</svg>


View file

@ -0,0 +1 @@
<?xml version="1.0" ?><svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g><g><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="0.0000038" x2="512" y1="256" y2="256"><stop offset="0" style="stop-color:#00AEEE"/><stop offset="1" style="stop-color:#0095DA"/></linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666679" x2="469.3333435" y1="256.0005188" y2="256.0005188"><stop offset="0" style="stop-color:#0095DA"/><stop offset="1" style="stop-color:#00AEEE"/></linearGradient><path d="M256,469.3338623c-117.6314697,0-213.3333282-95.7023926-213.3333282-213.3333435 c0-117.6314545,95.7018585-213.333313,213.3333282-213.333313c117.636261,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.636261,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/></g><path d="M360.2250671,330.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C315.2849121,166.9605255,292.3237305,144,264,144 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842865,51.0314636v46.6655884h-91.2256165 v37.9413452h-21.5375977V384h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256165v27.9413452h-21.5376129V384h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H328.687561V384h53.0761719v-53.0773315H360.2250671z" opacity="0.3"/><path d="M352.2250671,322.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C307.2849121,158.9605255,284.3237305,136,256,136 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842712,51.0314636v46.6655884h-91.2256012 v37.9413452h-21.5375977V376h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256012v27.9413452h-21.5375977V376h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H320.687561V376h53.0761719v-53.0773315H352.2250671z" fill="#FFFFFF"/></g></svg>


View file

@ -1,18 +1,25 @@
import os
import sys
from pypeapp import config
from pype.maya import lib
import pype.maya.lib as mlib
from maya import cmds
def build_shelf():
presets = config.get_presets()
shelf_preset = presets['maya'].get('project_shelf')
if shelf_preset:
project = os.environ["AVALON_PROJECT"]
for k, v in shelf_preset['imports'].items():
sys.modules[k] = __import__(v, fromlist=[project])
print("starting PYPE usersetup")
lib.shelf(name=shelf_preset['name'], preset=shelf_preset)
# build a shelf
presets = config.get_presets()
shelf_preset = presets['maya'].get('project_shelf')
cmds.evalDeferred("build_shelf()")
if shelf_preset:
project = os.environ["AVALON_PROJECT"]
for i in shelf_preset['imports']:
import_string = "from {} import {}".format(project, i)
print(import_string)
exec(import_string)
cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], preset=shelf_preset)")
print("finished PYPE usersetup")