Merge branch 'develop' into feature/PYPE-18-maya-publishing-playblasts

Milan Kolar 2018-12-06 23:38:04 +01:00
commit bdbd1a7189
69 changed files with 4289 additions and 776 deletions

View file

@ -6,6 +6,15 @@ from avalon import api as avalon
from .launcher_actions import register_launcher_actions
from .lib import collect_container_metadata
import logging
log = logging.getLogger(__name__)
# do not delete; these are mandatory
Anatomy = None
Dataflow = None
Metadata = None
Colorspace = None
PACKAGE_DIR = os.path.dirname(__file__)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -15,12 +24,13 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load")
def install():
print("Registering global plug-ins..")
log.info("Registering global plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
def uninstall():
print("Deregistering global plug-ins..")
log.info("Deregistering global plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
log.info("Global plug-ins unregistred")

View file

@ -15,6 +15,26 @@ from .action import (
RepairContextAction
)
from app.api import Logger
from . import (
Anatomy,
Colorspace,
Metadata,
Dataflow
)
from .templates import (
load_data_from_templates,
reset_data_from_templates,
get_project_name,
get_project_code,
get_hiearchy,
get_asset,
get_task,
fill_avalon_workdir,
get_version_from_workfile
)
__all__ = [
# plugin classes
"Extractor",
@ -25,5 +45,28 @@ __all__ = [
"ValidateMeshOrder",
# action
"get_errored_instances_from_context",
"RepairAction"
"RepairAction",
"Logger",
# contextual templates
# get data to preloaded templates
"load_data_from_templates",
"reset_data_from_templates",
# get contextual data
"get_project_name",
"get_project_code",
"get_hiearchy",
"get_asset",
"get_task",
"fill_avalon_workdir",
"get_version_from_workfile",
# preloaded templates
"Anatomy",
"Colorspace",
"Metadata",
"Dataflow"
]
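# Editor's note - a minimal usage sketch of the template helpers exported
# above (not part of this commit; assumes the functions behave as named):
#
#     from pype import api as pype
#     pype.load_data_from_templates()      # preload Anatomy/Colorspace/...
#     project = pype.get_project_name()
#     pype.fill_avalon_workdir()
#     pype.reset_data_from_templates()     # drop the preloaded data again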

View file

@ -9,7 +9,7 @@ from app.api import Logger
log = Logger.getLogger(__name__)
def registerApp(app, session):
name = app['name'].split("_")[0]
name = app['name'].replace("_", ".")
variant = ""
try:
variant = app['name'].split("_")[1]
@ -31,11 +31,19 @@ def registerApp(app, session):
label = apptoml['ftrack_label']
icon = None
ftrack_resources = "" # Path to resources here
if 'icon' in apptoml:
icon = apptoml['icon']
if '{ftrack_resources}' in icon:
icon = icon.format(ftrack_resources=ftrack_resources)
description = None
if 'description' in apptoml:
description = apptoml['description']
# register action
AppAction(session, label, name, executable, variant, icon).register()
AppAction(session, label, name, executable, variant, icon, description).register()
def register(session):
@ -59,6 +67,7 @@ def register(session):
appNames.append(app['name'])
apps.append(app)
apps = sorted(apps, key=lambda x: x['name'])
for app in apps:
try:
registerApp(app, session)
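# Editor's note - a rough sketch of the keys registerApp() reads from an
# application .toml (values are hypothetical):
#
#     apptoml = {
#         'ftrack_label': 'Maya',
#         'icon': '{ftrack_resources}/icons/maya.png',
#         'description': 'Autodesk Maya',
#     }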

View file

@ -24,14 +24,18 @@ class AvalonIdAttribute(BaseAction):
def discover(self, session, entities, event):
''' Validation '''
'''
Validation
- action is only for Administrators
'''
success = False
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
for role in user['user_security_roles']:
if role['security_role']['name'] == 'Administrator':
success = True
# userId = event['source']['user']['id']
# user = session.query('User where id is ' + userId).one()
# if user['user_security_roles'][0]['security_role']['name'] != 'Administrator':
# return False
return True
return success
def launch(self, session, entities, event):
@ -49,13 +53,21 @@ class AvalonIdAttribute(BaseAction):
})
session.commit()
try:
# Checkbox for event sync
cbxSyncName = 'avalon_auto_sync'
cbxSyncLabel = 'Avalon auto-sync'
cbxSyncExist = False
# Attribute Name and Label
custAttrName = 'avalon_mongo_id'
custAttrLabel = 'Avalon/Mongo Id'
attrs_update = set()
# Types that don't need object_type_id
base = {'show'}
# Don't create custom attribute on these entity types:
exceptions = ['task','milestone','library']
exceptions = ['task', 'milestone']
exceptions.extend(base)
# Get all possible object types
all_obj_types = session.query('ObjectType').all()
@ -73,6 +85,7 @@ class AvalonIdAttribute(BaseAction):
# Get IDs of filtered object types
all_obj_types_id = set()
for obj in all_obj_types:
all_obj_types_id.add(obj['id'])
@ -80,20 +93,60 @@ class AvalonIdAttribute(BaseAction):
current_cust_attr = session.query('CustomAttributeConfiguration').all()
# Filter already existing AvalonMongoID attr.
for attr in current_cust_attr:
if attr['key'] == cbxSyncName:
cbxSyncExist = True
cbxAttribute = attr
if attr['key'] == custAttrName:
if attr['entity_type'] in base:
base.remove(attr['entity_type'])
attrs_update.add(attr)
if attr['object_type_id'] in all_obj_types_id:
all_obj_types_id.remove(attr['object_type_id'])
attrs_update.add(attr)
# Set session back to beginning ("session.query" raises an error on commit)
session.rollback()
# Set security roles for attribute
custAttrSecuRole = session.query('SecurityRole').all()
role_api = session.query('SecurityRole where name is "API"').one()
role_admin = session.query('SecurityRole where name is "Administrator"').one()
roles = [role_api, role_admin]
# Set Text type of Attribute
custom_attribute_type = session.query(
'CustomAttributeType where name is "text"'
).one()
# Get/Set 'avalon' group
groups = session.query('CustomAttributeGroup where name is "avalon"').all()
if len(groups) > 1:
msg = "There are more Custom attribute groups with name 'avalon'"
self.log.warning(msg)
return { 'success': False, 'message':msg }
elif len(groups) < 1:
group = session.create('CustomAttributeGroup', {
'name': 'avalon',
})
session.commit()
else:
group = groups[0]
# Checkbox for auto-sync event / Create or Update(roles + group)
if cbxSyncExist is False:
cbxType = session.query('CustomAttributeType where name is "boolean"').first()
session.create('CustomAttributeConfiguration', {
'entity_type': 'show',
'type': cbxType,
'label': cbxSyncLabel,
'key': cbxSyncName,
'default': False,
'write_security_roles': roles,
'read_security_roles': roles,
'group':group,
})
else:
cbxAttribute['write_security_roles'] = roles
cbxAttribute['read_security_roles'] = roles
cbxAttribute['group'] = group
for entity_type in base:
# Create a custom attribute configuration.
@ -103,8 +156,9 @@ class AvalonIdAttribute(BaseAction):
'label': custAttrLabel,
'key': custAttrName,
'default': '',
'write_security_roles': custAttrSecuRole,
'read_security_roles': custAttrSecuRole,
'write_security_roles': roles,
'read_security_roles': roles,
'group':group,
'config': json.dumps({'markdown': False})
})
@ -117,16 +171,24 @@ class AvalonIdAttribute(BaseAction):
'label': custAttrLabel,
'key': custAttrName,
'default': '',
'write_security_roles': custAttrSecuRole,
'read_security_roles': custAttrSecuRole,
'write_security_roles': roles,
'read_security_roles': roles,
'group':group,
'config': json.dumps({'markdown': False})
})
for attr in attrs_update:
attr['write_security_roles'] = roles
attr['read_security_roles'] = roles
attr['group'] = group
job['status'] = 'done'
session.commit()
except Exception as e:
session.rollback()
job['status'] = 'failed'
session.commit()
self.log.error("Creating custom attributes failed ({})".format(e))
return True
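# Editor's note - a quick way to verify the created attributes from a
# ftrack_api session (a sketch, not part of this commit):
#
#     attrs = session.query(
#         'CustomAttributeConfiguration where key is "avalon_mongo_id"'
#     ).all()
#     cbx = session.query(
#         'CustomAttributeConfiguration where key is "avalon_auto_sync"'
#     ).first()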

View file

@ -1,354 +0,0 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import sys
import argparse
import logging
import os
import ftrack_api
import json
import re
from ftrack_action_handler import BaseAction
from avalon import io, inventory, lib
from avalon.vendor import toml
class SyncToAvalon(BaseAction):
'''Synchronize Ftrack to Avalon action.'''
#: Action identifier.
identifier = 'sync.to.avalon'
#: Action label.
label = 'SyncToAvalon'
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = 'https://cdn1.iconfinder.com/data/icons/hawcons/32/699650-icon-92-inbox-download-512.png'
def discover(self, session, entities, event):
''' Validation '''
discover = False
for entity in entities:
if entity.entity_type.lower() not in ['task', 'assetversion']:
discover = True
break
return discover
def launch(self, session, entities, event):
message = ""
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Avalon.'
})
})
try:
self.log.info("action <" + self.__class__.__name__ + "> is running")
#TODO AVALON_PROJECTS, AVALON_ASSET and AVALON_SILO should be set, otherwise the console log shows avalon debug output
self.setAvalonAttributes(session)
self.importable = []
# get from top entity in hierarchy all parent entities
top_entity = entities[0]['link']
if len(top_entity) > 1:
for e in top_entity:
parent_entity = session.get(e['type'], e['id'])
self.importable.append(parent_entity)
# get all child entities separately/unique
for entity in entities:
self.getShotAsset(entity)
# Check duplicate name - raise error if found
all_names = {}
duplicates = []
for e in self.importable:
name = self.checkName(e['name'])
if name in all_names:
duplicates.append("'{}'-'{}'".format(all_names[name], e['name']))
else:
all_names[name] = e['name']
if len(duplicates) > 0:
raise ValueError("Unable to sync: Entity name duplication: {}".format(", ".join(duplicates)))
# Import all entities to Avalon DB
for e in self.importable:
self.importToAvalon(session, e)
job['status'] = 'done'
session.commit()
self.log.info('Synchronization to Avalon was successful!')
except Exception as e:
job['status'] = 'failed'
message = str(e)
self.log.error('Something went wrong during synchronization to Avalon! ({})'.format(message))
if len(message) > 0:
return {
'success': False,
'message': message
}
return {
'success': True,
'message': "Synchronization was successfull"
}
def setAvalonAttributes(self, session):
self.custom_attributes = []
all_avalon_attr = session.query('CustomAttributeGroup where name is "avalon"').one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' not in cust_attr['key']:
self.custom_attributes.append(cust_attr)
def getShotAsset(self, entity):
if entity.entity_type not in ['Task']:
if entity not in self.importable:
self.importable.append(entity)
if entity['children']:
childrens = entity['children']
for child in childrens:
self.getShotAsset(child)
def checkName(self, input_name):
if input_name.find(" ") == -1:
name = input_name
else:
name = input_name.replace(" ", "-")
self.log.info("Name of {} was changed to {}".format(input_name, name))
return name
def getConfig(self, entity):
apps = []
for app in entity['custom_attributes']['applications']:
try:
label = toml.load(lib.which_app(app))['label']
apps.append({'name':app, 'label':label})
except Exception as e:
self.log.error('Error with application {0} - {1}'.format(app, e))
config = {
'schema': 'avalon-core:config-1.0',
'tasks': [{'name': ''}],
'apps': apps,
# TODO redo work!!!
'template': {
'workfile': '{asset[name]}_{task[name]}_{version:0>3}<_{comment}>',
'work': '{root}/{project}/{hierarchy}/{asset}/work/{task}',
'publish':'{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'}
}
return config
def importToAvalon(self, session, entity):
eLinks = []
ca_mongoid = 'avalon_mongo_id'
# get needed info of entity and all parents
for e in entity['link']:
tmp = session.get(e['type'], e['id'])
eLinks.append(tmp)
entityProj = eLinks[0]
# set AVALON_PROJECT env
os.environ["AVALON_PROJECT"] = entityProj["full_name"]
os.environ["AVALON_ASSET"] = entityProj['full_name']
# Set project template
template = {"schema": "avalon-core:inventory-1.0"}
# --- Begin: PUSH TO Avalon ---
io.install()
## ----- PROJECT ------
# If project doesn't exist -> <Create project> ELSE <Update Config>
avalon_project = io.find_one({"type": "project", "name": entityProj["full_name"]})
entity_type = entity.entity_type
data = {}
data['ftrackId'] = entity['id']
data['entityType'] = entity_type
for cust_attr in self.custom_attributes:
if cust_attr['entity_type'].lower() in ['asset']:
data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project':
data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project':
# Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build')
entity_type = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
# Get object id of entity type
ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type)).one()['id']
if cust_attr['object_type_id'] == ent_obj_type_id:
data[cust_attr['key']] = entity['custom_attributes'][cust_attr['key']]
if entity.entity_type.lower() in ['project']:
# Set project Config
config = self.getConfig(entity)
if avalon_project is None:
inventory.save(entityProj['full_name'], config, template)
else:
io.update_many({'type': 'project','name': entityProj['full_name']},
{'$set':{'config':config}})
data['code'] = entity['name']
# Store info about project (FtrackId)
io.update_many({
'type': 'project',
'name': entity['full_name']},
{'$set':{'data':data}})
projectId = io.find_one({"type": "project", "name": entityProj["full_name"]})["_id"]
if ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][ca_mongoid] = str(projectId)
else:
self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
io.uninstall()
return
# Store project Id
projectId = avalon_project["_id"]
## ----- ASSETS ------
# Presets:
# TODO how to check if entity is Asset Library or AssetBuild?
if entity.entity_type in ['AssetBuild', 'Library']:
silo = 'Assets'
else:
silo = 'Film'
os.environ['AVALON_SILO'] = silo
# Get list of parents without project
parents = []
for i in range(1, len(eLinks)-1):
parents.append(eLinks[i])
# Get info for 'Data' in Avalon DB
tasks = []
for child in entity['children']:
if child.entity_type in ['Task']:
tasks.append(child['name'])
folderStruct = []
parentId = None
for parent in parents:
name = self.checkName(parent['name'])
folderStruct.append(name)
parentId = io.find_one({'type': 'asset', 'name': name})['_id']
if parent['parent'].entity_type != 'project' and parentId is None:
self.importToAvalon(parent)
parentId = io.find_one({'type': 'asset', 'name': name})['_id']
hierarchy = os.path.sep.join(folderStruct)
data['visualParent'] = parentId
data['parents'] = folderStruct
data['tasks'] = tasks
data['hierarchy'] = hierarchy
name = self.checkName(entity['name'])
os.environ['AVALON_ASSET'] = name
# Try to find asset in current database
avalon_asset = io.find_one({'type': 'asset', 'name': name})
# Create if it doesn't exist
if avalon_asset is None:
inventory.create_asset(name, silo, data, projectId)
self.log.debug("Asset {} - created".format(name))
# Raise error if it seems to be different ent. with same name
elif (avalon_asset['data']['ftrackId'] != data['ftrackId'] or
avalon_asset['data']['visualParent'] != data['visualParent'] or
avalon_asset['data']['parents'] != data['parents']):
raise ValueError('Entity <{}> is not the same'.format(name))
# Else update info
else:
io.update_many({'type': 'asset','name': name},
{'$set':{'data':data, 'silo': silo}})
# TODO check if is asset in same folder!!! ???? FEATURE FOR FUTURE
self.log.debug("Asset {} - updated".format(name))
## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK
# Set custom attribute to avalon/mongo id of entity (parentID is last)
if ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][ca_mongoid] = str(parentId)
else:
self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
io.uninstall()
session.commit()
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
action_handler = SyncToAvalon(session)
action_handler.register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@ -0,0 +1,351 @@
import sys
import argparse
import logging
import os
import ftrack_api
import json
import re
from pype import lib
from ftrack_action_handler import BaseAction
from bson.objectid import ObjectId
from avalon import io, inventory
from pype.ftrack import ftrack_utils
class SyncToAvalon(BaseAction):
'''
Synchronizing data action - from Ftrack to Avalon DB
Stores all information about an entity.
- Name(string) - Most important information = identifier of the entity
- Parent(ObjectId) - Avalon Project Id, if the entity is not the project itself
- Silo(string) - Last parent except project
- Data(dictionary):
- VisualParent(ObjectId) - Avalon Id of the parent asset
- Parents(array of string) - All parent names except project
- Tasks(array of string) - Tasks on the asset
- FtrackId(string)
- entityType(string) - entity's type on Ftrack
* All Custom attributes in group 'Avalon' whose names don't start with 'avalon_'
* This information is also stored for all parent and child entities.
The Avalon ID of an asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
- the action DOES NOT create this Custom attribute if it doesn't exist
- run the 'Create Custom Attributes' action or do it manually (not recommended)
If the Ftrack entity already has the Custom Attribute 'avalon_mongo_id' storing an ID:
- name, parents and silo are checked -> an error is shown if they are not exactly the same
- after sync it is not allowed to change names or move entities
If the ID in 'avalon_mongo_id' is an empty string or is not found in the DB:
- tries to find the entity by name
- found:
- raises an error if ftrackId/visual parent/parents are not the same
- not found:
- creates the asset/project
'''
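# Editor's note - for illustration, the Avalon DB document this action writes
# for an asset roughly looks like this (values are hypothetical):
#
#     {
#         'type': 'asset',
#         'name': 'sh010',
#         'silo': 'Film',
#         'parent': ObjectId('...'),          # Avalon project id
#         'data': {
#             'visualParent': ObjectId('...'),
#             'parents': ['ep01', 'sq01'],    # all parents except project
#             'tasks': ['animation'],
#             'ftrackId': '<ftrack entity id>',
#             'entityType': 'Shot',
#         },
#     }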
#: Action identifier.
identifier = 'sync.to.avalon.local'
#: Action label.
label = 'SyncToAvalon - Local'
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = 'https://cdn1.iconfinder.com/data/icons/hawcons/32/699650-icon-92-inbox-download-512.png'
def discover(self, session, entities, event):
''' Validation '''
roleCheck = False
discover = False
roleList = ['Pypeclub']
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
for role in user['user_security_roles']:
if role['security_role']['name'] in roleList:
roleCheck = True
if roleCheck is True:
for entity in entities:
if entity.entity_type.lower() not in ['task', 'assetversion']:
discover = True
break
return discover
def launch(self, session, entities, event):
message = ""
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Avalon.'
})
})
try:
self.log.info("Action <" + self.__class__.__name__ + "> is running")
self.ca_mongoid = 'avalon_mongo_id'
#TODO AVALON_PROJECTS, AVALON_ASSET and AVALON_SILO should be set, otherwise the console log shows avalon debug output
self.setAvalonAttributes()
self.importable = []
# get from top entity in hierarchy all parent entities
top_entity = entities[0]['link']
if len(top_entity) > 1:
for e in top_entity:
parent_entity = session.get(e['type'], e['id'])
self.importable.append(parent_entity)
# get all child entities separately/unique
for entity in entities:
self.getShotAsset(entity)
# Check names: REGEX in schema/duplicates - raise error if found
all_names = []
duplicates = []
for e in self.importable:
ftrack_utils.avalon_check_name(e)
if e['name'] in all_names:
duplicates.append("'{}'".format(e['name']))
else:
all_names.append(e['name'])
if len(duplicates) > 0:
raise ValueError("Entity name duplication: {}".format(", ".join(duplicates)))
## ----- PROJECT ------
# store Ftrack project - self.importable[0] must be the project entity!
self.entityProj = self.importable[0]
# set AVALON_ env
os.environ["AVALON_PROJECT"] = self.entityProj["full_name"]
os.environ["AVALON_ASSET"] = self.entityProj["full_name"]
self.avalon_project = None
io.install()
# Import all entities to Avalon DB
for e in self.importable:
self.importToAvalon(session, e)
io.uninstall()
job['status'] = 'done'
session.commit()
self.log.info('Synchronization to Avalon was successful!')
except ValueError as ve:
job['status'] = 'failed'
session.commit()
message = str(ve)
self.log.error('Error during syncToAvalon: {}'.format(message))
except Exception as e:
job['status'] = 'failed'
session.commit()
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno)
self.log.error('Error during syncToAvalon: {}'.format(log_message))
message = 'Unexpected Error - Please check Log for more information'
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {
'success': False,
'message': message
}
return {
'success': True,
'message': "Synchronization was successfull"
}
def setAvalonAttributes(self):
self.custom_attributes = []
all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' not in cust_attr['key']:
self.custom_attributes.append(cust_attr)
def getShotAsset(self, entity):
if entity.entity_type not in ['Task']:
if entity not in self.importable:
self.importable.append(entity)
if entity['children']:
childrens = entity['children']
for child in childrens:
self.getShotAsset(child)
def importToAvalon(self, session, entity):
# --- Begin: PUSH TO Avalon ---
entity_type = entity.entity_type
if entity_type.lower() in ['project']:
# Set project Config
config = ftrack_utils.get_config(entity)
# Set project template
template = lib.get_avalon_project_template_schema()
if self.ca_mongoid in entity['custom_attributes']:
try:
projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid])
self.avalon_project = io.find_one({"_id": projectId})
except Exception:
self.log.debug("Entity {} doesn't have a stored entity id in ftrack".format(entity['name']))
if self.avalon_project is None:
self.avalon_project = io.find_one({
"type": "project",
"name": entity["full_name"]
})
if self.avalon_project is None:
inventory.save(entity['full_name'], config, template)
self.avalon_project = io.find_one({
"type": "project",
"name": entity["full_name"]
})
elif self.avalon_project['name'] != entity['full_name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], entity['full_name']))
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# Store info about project (FtrackId)
io.update_many({
'type': 'project',
'name': entity['full_name']
}, {
'$set':{'data':data, 'config':config}
})
self.projectId = self.avalon_project["_id"]
if self.ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
else:
self.log.error('Custom attribute for "{}" is not created.'.format(entity['name']))
return
## ----- ASSETS ------
# Presets:
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# return if entity is silo
if len(data['parents']) == 0:
return
else:
silo = data['parents'][0]
os.environ['AVALON_SILO'] = silo
name = entity['name']
os.environ['AVALON_ASSET'] = name
# Try to find asset in current database
avalon_asset = None
if self.ca_mongoid in entity['custom_attributes']:
try:
entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid])
avalon_asset = io.find_one({"_id": entityId})
except Exception:
self.log.debug("Entity {} doesn't have a stored entity id in ftrack".format(entity['name']))
if avalon_asset is None:
avalon_asset = io.find_one({'type': 'asset', 'name': name})
# Create if it doesn't exist
if avalon_asset is None:
inventory.create_asset(name, silo, data, self.projectId)
self.log.debug("Asset {} - created".format(name))
# Raise error if it seems to be different ent. with same name
elif (avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo):
raise ValueError('An entity with name "{0}" already exists in Avalon DB'.format(name))
elif avalon_asset['name'] != entity['name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set the name back'.format(avalon_asset['name'], name))
elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
old_path = "/".join(avalon_asset['data']['parents'])
new_path = "/".join(data['parents'])
raise ValueError('You can\'t move entities. Entity "{}" was moved from "{}" to "{}"'.format(avalon_asset['name'], old_path, new_path))
# Update info
io.update_many({'type': 'asset','name': name},
{'$set':{'data':data, 'silo': silo}})
self.log.debug("Asset {} - updated".format(name))
entityId = io.find_one({'type': 'asset', 'name': name})['_id']
## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK
# Set custom attribute to avalon/mongo id of entity (parentID is last)
if self.ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][self.ca_mongoid] = str(entityId)
else:
self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
session.commit()
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
action_handler = SyncToAvalon(session)
action_handler.register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@ -1,6 +1,7 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import os
import sys
import logging
import getpass
import platform
@ -10,14 +11,9 @@ from avalon import io, lib, pipeline
from avalon import session as sess
import acre
from app.api import (
Templates,
Logger
)
t = Templates(
type=["anatomy"]
)
from pype import api as pype
class AppAction(object):
@ -34,7 +30,7 @@ class AppAction(object):
def __init__(self, session, label, name, executable, variant=None, icon=None, description=None):
'''Expects a ftrack_api.Session instance'''
self.log = Logger.getLogger(self.__class__.__name__)
self.log = pype.Logger.getLogger(self.__class__.__name__)
# self.logger = Logger.getLogger(__name__)
@ -73,6 +69,7 @@ class AppAction(object):
),
self._launch
)
self.log.info("Application '{} {}' - Registered successfully".format(self.label,self.variant))
def _discover(self, event):
args = self._translate_event(
@ -229,13 +226,9 @@ class AppAction(object):
entity, id = entities[0]
entity = session.get(entity, id)
silo = "Film"
if entity.entity_type == "AssetBuild":
silo = "Asset"
# set environments for Avalon
os.environ["AVALON_PROJECT"] = entity['project']['full_name']
os.environ["AVALON_SILO"] = silo
os.environ["AVALON_SILO"] = entity['ancestors'][0]['name']
os.environ["AVALON_ASSET"] = entity['parent']['name']
os.environ["AVALON_TASK"] = entity['name']
os.environ["AVALON_APP"] = self.identifier
@ -243,7 +236,7 @@ class AppAction(object):
os.environ["FTRACK_TASKID"] = id
anatomy = t.anatomy
anatomy = pype.Anatomy
io.install()
hierarchy = io.find_one({"type": 'asset', "name": entity['parent']['name']})[
'data']['parents']
@ -257,9 +250,10 @@ class AppAction(object):
"task": entity['name'],
"asset": entity['parent']['name'],
"hierarchy": hierarchy}
anatomy = anatomy.format(data)
try:
anatomy = anatomy.format(data)
except Exception as e:
self.log.error("{0} Error in anatomy.format: {1}".format(__name__, e))
os.environ["AVALON_WORKDIR"] = os.path.join(anatomy.work.root, anatomy.work.folder)
# TODO Add paths to avalon setup from tomls
@ -299,20 +293,71 @@ class AppAction(object):
# Full path to executable launcher
execfile = None
for ext in os.environ["PATHEXT"].split(os.pathsep):
fpath = os.path.join(path.strip('"'), self.executable + ext)
if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
execfile = fpath
break
if sys.platform == "win32":
for ext in os.environ["PATHEXT"].split(os.pathsep):
fpath = os.path.join(path.strip('"'), self.executable + ext)
if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
execfile = fpath
break
pass
# Run SW if an executable was found
if execfile is not None:
lib.launch(executable=execfile, args=[], environment=env)
else:
return {
'success': False,
'message': "We didn't found launcher for {0}"
.format(self.label)
}
pass
if sys.platform.startswith('linux'):
execfile = os.path.join(path.strip('"'), self.executable)
if os.path.isfile(execfile):
try:
fp = open(execfile)
except PermissionError as p:
self.log.error('Access denied on {0} - {1}'.
format(execfile, p))
return {
'success': False,
'message': "Access denied on launcher - {}".
format(execfile)
}
fp.close()
# check executable permission
if not os.access(execfile, os.X_OK):
self.log.error('No executable permission on {}'.
format(execfile))
return {
'success': False,
'message': "No executable permission - {}"
.format(execfile)
}
pass
else:
self.log.error('Launcher doesn\'t exist - {}'.
format(execfile))
return {
'success': False,
'message': "Launcher doesn't exist - {}"
.format(execfile)
}
pass
# Run SW if an executable was found
if execfile is not None:
lib.launch('/usr/bin/env', args=['bash', execfile], environment=env)
else:
return {
'success': False,
'message': "We didn't found launcher for {0}"
.format(self.label)
}
pass
# Run SW if an executable was found
if execfile is not None:
lib.launch(executable=execfile, args=[], environment=env)
else:
return {
'success': False,
'message': "We didn't found launcher for {0}".format(self.label)
}
# RUN TIMER IN FTRACK
username = event['source']['user']['username']
@ -400,7 +445,7 @@ class BaseAction(object):
def __init__(self, session):
'''Expects a ftrack_api.Session instance'''
self.log = Logger.getLogger(self.__class__.__name__)
self.log = pype.Logger.getLogger(self.__class__.__name__)
if self.label is None:
raise ValueError(
@ -437,7 +482,8 @@ class BaseAction(object):
),
self._launch
)
self.log.info("----- action - <" + self.__class__.__name__ + "> - Has been registered -----")
self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__))
def _discover(self, event):
args = self._translate_event(

View file

@ -0,0 +1,366 @@
import sys
import argparse
import logging
import os
import ftrack_api
import json
import re
from pype import lib
from pype.ftrack.actions.ftrack_action_handler import BaseAction
from bson.objectid import ObjectId
from avalon import io, inventory
from pype.ftrack import ftrack_utils
class Sync_To_Avalon(BaseAction):
'''
Synchronizing data action - from Ftrack to Avalon DB
Stores all information about an entity.
- Name(string) - Most important information = identifier of the entity
- Parent(ObjectId) - Avalon Project Id, if the entity is not the project itself
- Silo(string) - Last parent except project
- Data(dictionary):
- VisualParent(ObjectId) - Avalon Id of the parent asset
- Parents(array of string) - All parent names except project
- Tasks(array of string) - Tasks on the asset
- FtrackId(string)
- entityType(string) - entity's type on Ftrack
* All Custom attributes in group 'Avalon' whose names don't start with 'avalon_'
* This information is also stored for all parent and child entities.
The Avalon ID of an asset is stored to Ftrack -> Custom attribute 'avalon_mongo_id'.
- the action DOES NOT create this Custom attribute if it doesn't exist
- run the 'Create Custom Attributes' action or do it manually (not recommended)
If the Ftrack entity already has the Custom Attribute 'avalon_mongo_id' storing an ID:
- name, parents and silo are checked -> an error is shown if they are not exactly the same
- after sync it is not allowed to change names or move entities
If the ID in 'avalon_mongo_id' is an empty string or is not found in the DB:
- tries to find the entity by name
- found:
- raises an error if ftrackId/visual parent/parents are not the same
- not found:
- creates the asset/project
'''
#: Action identifier.
identifier = 'sync.to.avalon'
#: Action label.
label = 'SyncToAvalon'
#: Action description.
description = 'Send data from Ftrack to Avalon'
#: Action icon.
icon = 'https://cdn1.iconfinder.com/data/icons/hawcons/32/699650-icon-92-inbox-download-512.png'
def register(self):
'''Registers the action, subscribing to the discover and launch topics.'''
self.session.event_hub.subscribe(
'topic=ftrack.action.discover',
self._discover
)
self.session.event_hub.subscribe(
'topic=ftrack.action.launch and data.actionIdentifier={0}'.format(
self.identifier
),
self._launch
)
self.log.info("Action '{}' - Registered successfully".format(self.__class__.__name__))
def discover(self, session, entities, event):
''' Validation '''
roleCheck = False
discover = False
roleList = ['Administrator', 'Project Manager']
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
for role in user['user_security_roles']:
if role['security_role']['name'] in roleList:
roleCheck = True
if roleCheck is True:
for entity in entities:
if entity.entity_type.lower() not in ['task', 'assetversion']:
discover = True
break
return discover
def launch(self, session, entities, event):
message = ""
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Avalon.'
})
})
try:
self.log.info("Action <" + self.__class__.__name__ + "> is running")
self.ca_mongoid = 'avalon_mongo_id'
#TODO AVALON_PROJECTS, AVALON_ASSET and AVALON_SILO should be set, otherwise the console log shows avalon debug output
self.setAvalonAttributes()
self.importable = []
# get from top entity in hierarchy all parent entities
top_entity = entities[0]['link']
if len(top_entity) > 1:
for e in top_entity:
parent_entity = session.get(e['type'], e['id'])
self.importable.append(parent_entity)
# get all child entities separately/unique
for entity in entities:
self.getShotAsset(entity)
# Check names: REGEX in schema/duplicates - raise error if found
all_names = []
duplicates = []
for e in self.importable:
ftrack_utils.avalon_check_name(e)
if e['name'] in all_names:
duplicates.append("'{}'".format(e['name']))
else:
all_names.append(e['name'])
if len(duplicates) > 0:
raise ValueError("Entity name duplication: {}".format(", ".join(duplicates)))
## ----- PROJECT ------
# store Ftrack project - self.importable[0] must be the project entity!
self.entityProj = self.importable[0]
# set AVALON_ env
os.environ["AVALON_PROJECT"] = self.entityProj["full_name"]
os.environ["AVALON_ASSET"] = self.entityProj["full_name"]
self.avalon_project = None
io.install()
# Import all entities to Avalon DB
for e in self.importable:
self.importToAvalon(session, e)
io.uninstall()
job['status'] = 'done'
session.commit()
self.log.info('Synchronization to Avalon was successful!')
except ValueError as ve:
job['status'] = 'failed'
session.commit()
message = str(ve)
self.log.error('Error during syncToAvalon: {}'.format(message))
except Exception as e:
job['status'] = 'failed'
session.commit()
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
log_message = "{}/{}/Line: {}".format(exc_type, fname, exc_tb.tb_lineno)
self.log.error('Error during syncToAvalon: {}'.format(log_message))
message = 'Unexpected Error - Please check Log for more information'
if len(message) > 0:
message = "Unable to sync: {}".format(message)
return {
'success': False,
'message': message
}
return {
'success': True,
'message': "Synchronization was successfull"
}
def setAvalonAttributes(self):
self.custom_attributes = []
all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' not in cust_attr['key']:
self.custom_attributes.append(cust_attr)
def getShotAsset(self, entity):
if entity.entity_type not in ['Task']:
if entity not in self.importable:
self.importable.append(entity)
if entity['children']:
childrens = entity['children']
for child in childrens:
self.getShotAsset(child)
def importToAvalon(self, session, entity):
# --- Begin: PUSH TO Avalon ---
entity_type = entity.entity_type
if entity_type.lower() in ['project']:
# Set project Config
config = ftrack_utils.get_config(entity)
# Set project template
template = lib.get_avalon_project_template_schema()
if self.ca_mongoid in entity['custom_attributes']:
try:
projectId = ObjectId(self.entityProj['custom_attributes'][self.ca_mongoid])
self.avalon_project = io.find_one({"_id": projectId})
except Exception:
self.log.debug("Entity {} doesn't have a stored entity id in ftrack".format(entity['name']))
if self.avalon_project is None:
self.avalon_project = io.find_one({
"type": "project",
"name": entity["full_name"]
})
if self.avalon_project is None:
inventory.save(entity['full_name'], config, template)
self.avalon_project = io.find_one({
"type": "project",
"name": entity["full_name"]
})
elif self.avalon_project['name'] != entity['full_name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], entity['full_name']))
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# Store info about project (FtrackId)
io.update_many({
'type': 'project',
'name': entity['full_name']
}, {
'$set':{'data':data, 'config':config}
})
self.projectId = self.avalon_project["_id"]
if self.ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
else:
self.log.error('Custom attribute for "{}" is not created.'.format(entity['name']))
return
## ----- ASSETS ------
# Presets:
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# return if entity is silo
if len(data['parents']) == 0:
return
else:
silo = data['parents'][0]
os.environ['AVALON_SILO'] = silo
name = entity['name']
os.environ['AVALON_ASSET'] = name
# Try to find asset in current database
avalon_asset = None
if self.ca_mongoid in entity['custom_attributes']:
try:
entityId = ObjectId(entity['custom_attributes'][self.ca_mongoid])
avalon_asset = io.find_one({"_id": entityId})
except Exception:
self.log.debug("Entity {} doesn't have a stored entity id in ftrack".format(entity['name']))
if avalon_asset is None:
avalon_asset = io.find_one({'type': 'asset', 'name': name})
# Create if it doesn't exist
if avalon_asset is None:
inventory.create_asset(name, silo, data, self.projectId)
self.log.debug("Asset {} - created".format(name))
# Raise error if it seems to be different ent. with same name
elif (avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo):
raise ValueError('An entity with name "{0}" already exists in Avalon DB'.format(name))
elif avalon_asset['name'] != entity['name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set the name back'.format(avalon_asset['name'], name))
elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
old_path = "/".join(avalon_asset['data']['parents'])
new_path = "/".join(data['parents'])
raise ValueError('You can\'t move entities. Entity "{}" was moved from "{}" to "{}"'.format(avalon_asset['name'], old_path, new_path))
# Update info
io.update_many({'type': 'asset','name': name},
{'$set':{'data':data, 'silo': silo}})
self.log.debug("Asset {} - updated".format(name))
entityId = io.find_one({'type': 'asset', 'name': name})['_id']
## FTRACK FEATURE - FTRACK MUST HAVE avalon_mongo_id FOR EACH ENTITY TYPE EXCEPT TASK
# Set custom attribute to avalon/mongo id of entity (parentID is last)
if self.ca_mongoid in entity['custom_attributes']:
entity['custom_attributes'][self.ca_mongoid] = str(entityId)
else:
self.log.error("Custom attribute for <{}> is not created.".format(entity['name']))
session.commit()
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
# Validate that session is an instance of ftrack_api.Session. If not,
# assume that register is being called from an old or incompatible API and
# return without doing anything.
if not isinstance(session, ftrack_api.session.Session):
return
action_handler = Sync_To_Avalon(session)
action_handler.register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))

View file

@ -0,0 +1,225 @@
import os
import sys
import re
import ftrack_api
from ftrack_event_handler import BaseEvent
from pype import lib
from avalon import io, inventory
from avalon.vendor import toml
from bson.objectid import ObjectId
from pype.ftrack import ftrack_utils
class Sync_to_Avalon(BaseEvent):
def launch(self, session, entities, event):
self.ca_mongoid = 'avalon_mongo_id'
# If the mongo_id textfield has changed: RETURN!
# - prevents an infinite loop
for ent in event['data']['entities']:
if 'keys' in ent:
if self.ca_mongoid in ent['keys']:
return
self.proj = None
# get project
for entity in entities:
try:
base_proj = entity['link'][0]
except Exception:
continue
self.proj = session.get(base_proj['type'], base_proj['id'])
break
# check if project is set to auto-sync
if (self.proj is None or
'avalon_auto_sync' not in self.proj['custom_attributes'] or
self.proj['custom_attributes']['avalon_auto_sync'] is False):
return
# check if project have Custom Attribute 'avalon_mongo_id'
if self.ca_mongoid not in self.proj['custom_attributes']:
message = "Custom attribute '{}' for 'Project' is not created or don't have set permissions for API".format(self.ca_mongoid)
self.log.warning(message)
self.show_message(event, message, False)
return
self.projectId = self.proj['custom_attributes'][self.ca_mongoid]
os.environ["AVALON_PROJECT"] = self.proj['full_name']
# get avalon project if possible
io.install()
try:
self.avalon_project = io.find_one({"_id": ObjectId(self.projectId)})
except Exception:
self.avalon_project = None
importEntities = []
if self.avalon_project is None:
self.avalon_project = io.find_one({"type": "project", "name": self.proj["full_name"]})
if self.avalon_project is None:
importEntities.append(self.proj)
else:
self.projectId = self.avalon_project['_id']
io.uninstall()
for entity in entities:
if entity.entity_type.lower() in ['task']:
entity = entity['parent']
try:
mongo_id = entity['custom_attributes'][self.ca_mongoid]
except Exception:
message = "Custom attribute '{}' for '{}' is not created or doesn't have permissions set for the API".format(self.ca_mongoid, entity.entity_type)
self.log.warning(message)
self.show_message(event, message, False)
return
if entity not in importEntities:
importEntities.append(entity)
if len(importEntities) < 1:
return
self.setAvalonAttributes()
io.install()
try:
for entity in importEntities:
self.importToAvalon(session, entity)
session.commit()
except ValueError as ve:
message = str(ve)
self.show_message(event, message, False)
self.log.warning(message)
except Exception as e:
message = str(e)
ftrack_message = "The SyncToAvalon event ended with an unexpected error; please check the log file for more information."
self.show_message(event, ftrack_message, False)
self.log.error(message)
io.uninstall()
return
def importToAvalon(self, session, entity):
if self.ca_mongoid not in entity['custom_attributes']:
raise ValueError("Custom attribute '{}' for '{}' is not created or don't have set permissions for API".format(self.ca_mongoid, entity['name']))
ftrack_utils.avalon_check_name(entity)
entity_type = entity.entity_type
if entity_type in ['Project']:
type = 'project'
name = entity['full_name']
config = ftrack_utils.get_config(entity)
template = lib.get_avalon_project_template_schema()
if self.avalon_project is None:
inventory.save(name, config, template)
self.avalon_project = io.find_one({'type': 'project', 'name': name})
elif self.avalon_project['name'] != name:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly!'.format(self.avalon_project['name'], name))
self.projectId = self.avalon_project['_id']
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
io.update_many(
{"_id": ObjectId(self.projectId)},
{'$set':{
'name':name,
'config':config,
'data':data,
}})
entity['custom_attributes'][self.ca_mongoid] = str(self.projectId)
return
if self.avalon_project is None:
self.importToAvalon(session, self.proj)
data = ftrack_utils.get_data(self, entity, session, self.custom_attributes)
# return if entity is silo
if len(data['parents']) == 0:
return
else:
silo = data['parents'][0]
name = entity['name']
os.environ["AVALON_ASSET"] = name
os.environ['AVALON_SILO'] = silo
avalon_asset = None
# existence of this custom attr is already checked
mongo_id = entity['custom_attributes'][self.ca_mongoid]
if mongo_id != "":
avalon_asset = io.find_one({'_id': ObjectId(mongo_id)})
if avalon_asset is None:
avalon_asset = io.find_one({'type': 'asset', 'name': name})
if avalon_asset is None:
mongo_id = inventory.create_asset(name, silo, data, ObjectId(self.projectId))
# Raise error if it seems to be different ent. with same name
elif (avalon_asset['data']['parents'] != data['parents'] or
avalon_asset['silo'] != silo):
raise ValueError('An entity with name "{0}" already exists in Avalon DB'.format(name))
elif avalon_asset['name'] != entity['name']:
raise ValueError('You can\'t change name {} to {}, avalon DB won\'t work properly - please set the name back'.format(avalon_asset['name'], name))
elif avalon_asset['silo'] != silo or avalon_asset['data']['parents'] != data['parents']:
old_path = "/".join(avalon_asset['data']['parents'])
new_path = "/".join(data['parents'])
raise ValueError('You can\'t move entities. Entity "{}" was moved from "{}" to "{}", avalon DB won\'t work properly'.format(avalon_asset['name'], old_path, new_path))
io.update_many(
{"_id": ObjectId(mongo_id)},
{'$set':{
'name':name,
'silo':silo,
'data':data,
'parent': ObjectId(self.projectId)}})
entity['custom_attributes'][self.ca_mongoid] = str(mongo_id)
def setAvalonAttributes(self):
self.custom_attributes = []
all_avalon_attr = self.session.query('CustomAttributeGroup where name is "avalon"').one()
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
if 'avalon_' not in cust_attr['key']:
self.custom_attributes.append(cust_attr)
def _translate_event(self, session, event):
exceptions = ['assetversion', 'job', 'user', 'reviewsessionobject', 'timer', 'socialfeed', 'timelog']
_selection = event['data'].get('entities',[])
_entities = list()
for entity in _selection:
if entity['entityType'] in exceptions:
continue
_entities.append(
(
session.get(self._get_entity_type(entity), entity.get('entityId'))
)
)
return [_entities, event]
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
event = Sync_to_Avalon(session)
event.register()

View file

@ -0,0 +1,25 @@
import os
import sys
import re
import ftrack_api
from ftrack_event_handler import BaseEvent
from app import api
class Test_Event(BaseEvent):
def launch(self, session, entities, event):
'''just a testing event'''
# self.log.info(event)
return True
def register(session, **kw):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
event = Test_Event(session)
event.register()

View file

@ -0,0 +1,154 @@
# :coding: utf-8
# :copyright: Copyright (c) 2017 ftrack
import os
import logging
import getpass
# import platform
import ftrack_api
import toml
from avalon import io, lib, pipeline
from avalon import session as sess
import acre
from app.api import (
Templates,
Logger
)
class BaseEvent(object):
'''Custom Event base class
BaseEvent is based on ftrack.update event
- get entities from event
If you want to use a different event base
- override register and optionally the _translate_event method
'''
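# Editor's note - overriding register() for a different event base, as the
# docstring suggests (a sketch; the topic is hypothetical):
#
#     class MyEvent(BaseEvent):
#         def register(self):
#             self.session.event_hub.subscribe(
#                 'topic=ftrack.action.launch', self._launch
#             )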
def __init__(self, session):
'''Expects a ftrack_api.Session instance'''
self.log = Logger.getLogger(self.__class__.__name__)
self._session = session
@property
def session(self):
'''Return current session.'''
return self._session
def register(self):
'''Registers the event, subscribing to the ftrack.update topic.'''
self.session.event_hub.subscribe('topic=ftrack.update', self._launch)
self.log.info("Event '{}' - Registered successfully".format(self.__class__.__name__))
def _translate_event(self, session, event):
'''Return *event* translated structure to be used with the API.'''
_selection = event['data'].get('entities',[])
_entities = list()
for entity in _selection:
if entity['entityType'] in ['socialfeed']:
continue
_entities.append(
(
session.get(self._get_entity_type(entity), entity.get('entityId'))
)
)
return [
_entities,
event
]
def _get_entity_type(self, entity):
'''Return translated entity type that can be used with the API.'''
# Get entity type and make sure it is lower cased. Most places except
# the component tab in the Sidebar will use lower case notation.
entity_type = entity.get('entityType').replace('_', '').lower()
for schema in self.session.schemas:
alias_for = schema.get('alias_for')
if (
alias_for and isinstance(alias_for, str) and
alias_for.lower() == entity_type
):
return schema['id']
for schema in self.session.schemas:
if schema['id'].lower() == entity_type:
return schema['id']
raise ValueError(
'Unable to translate entity type: {0}.'.format(entity_type)
)
def _launch(self, event):
self.session.reset()
args = self._translate_event(
self.session, event
)
self.launch(
self.session, *args
)
return
def launch(self, session, entities, event):
'''Callback method for the custom action.
return either a bool (True if successful or False if the action failed)
or a dictionary with the keys `message` and `success`; the message should be a
string and will be displayed as feedback to the user, success should be a bool,
True if successful or False if the action failed.
*session* is a `ftrack_api.Session` instance
*entities* is a list of tuples each containing the entity type and the entity id.
If the entity is hierarchical you will always get the entity
type TypedContext; once retrieved through a get operation you
will have the "real" entity type, e.g. Shot, Sequence
or Asset Build.
*event* the unmodified original event
'''
raise NotImplementedError()
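# Editor's note - a minimal launch() honoring the return contract described
# above (a sketch, not part of this commit):
#
#     def launch(self, session, entities, event):
#         for entity in entities:
#             self.log.info(entity.entity_type)
#         return {'success': True, 'message': 'Event processed'}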
def show_message(self, event, input_message, result=False):
"""
Shows a message to the user who triggered the event
- event - just the source of the user id
- input_message - message that is shown to the user
- result - changes the color of the message (based on ftrack settings)
- True = Violet
- False = Red
"""
if not isinstance(result, bool):
result = False
try:
message = str(input_message)
except Exception:
return
user_id = event['source']['user']['id']
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
data=dict(
type='message',
success=result,
message=message
),
target='applicationId=ftrack.client.web and user.id="{0}"'.format(user_id)
),
on_error='ignore'
)
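# Editor's note - typical usage of show_message() from a subclass (a sketch):
#
#     self.show_message(event, 'Synchronization finished', True)   # violet
#     self.show_message(event, 'Synchronization failed', False)    # red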

View file

@ -1,28 +0,0 @@
# import ftrack_api as local session
import ftrack_api
from utils import print_entity_head
#
session = ftrack_api.Session()
# ----------------------------------
def test_event(event):
'''just a testing event'''
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
if entity['entityType'] == 'task' and entity['action'] == 'update':
print "\n\nevent script: {}".format(__file__)
print_entity_head.print_entity_head(entity, session)
# for k in task.keys():
# print k, task[k]
# print '\n'
# print task['assignments']
for e in entity.keys():
print '{0}: {1}'.format(e, entity[e])
# end of event procedure ----------------------------------

View file

@ -7,11 +7,19 @@ import time
from app import style
from app.vendor.Qt import QtCore, QtGui, QtWidgets
from pype.ftrack import credentials, login_dialog as login_dialog
from app.api import Logger
from FtrackServer import FtrackServer
log = Logger.getLogger(__name__)
from pype import api as pype
# load data from templates
pype.load_data_from_templates()
log = pype.Logger.getLogger(__name__, "ftrack")
# Validation if already logged into Ftrack
class FtrackRunner:
def __init__(self, main_parent=None, parent=None):
@ -76,7 +84,7 @@ class FtrackRunner:
def runActionServer(self):
if self.actionThread is None:
self.actionThread = threading.Thread(target=self.setActionServer)
self.actionThread.daemon=True
self.actionThread.daemon = True
self.actionThread.start()
log.info("Ftrack action server launched")
@ -107,7 +115,7 @@ class FtrackRunner:
def runEventServer(self):
if self.eventThread is None:
self.eventThread = threading.Thread(target=self.setEventServer)
self.eventThread.daemon=True
self.eventThread.daemon = True
self.eventThread.start()
log.info("Ftrack event server launched")
@ -168,9 +176,9 @@ class FtrackRunner:
self.smEventS.addAction(self.aStopEventS)
# Actions - basic
self.aLogin = QtWidgets.QAction("Login",self.menu)
self.aLogin = QtWidgets.QAction("Login", self.menu)
self.aLogin.triggered.connect(self.validate)
self.aLogout = QtWidgets.QAction("Logout",self.menu)
self.aLogout = QtWidgets.QAction("Logout", self.menu)
self.aLogout.triggered.connect(self.logout)
self.menu.addAction(self.aLogin)

View file

@ -1,14 +1,145 @@
# ftrack helper functions
import ftrack_api
import os
import sys
import re
from pprint import *
import ftrack_api
from pype import lib
import avalon.io as io
import avalon.api
import avalon
from avalon.vendor import toml, jsonschema
from app.api import Logger
def checkLogin():
# check Environments FTRACK_API_USER, FTRACK_API_KEY
pass
log = Logger.getLogger(__name__)
def get_data(parent, entity, session, custom_attributes):
entity_type = entity.entity_type
data = {}
data['ftrackId'] = entity['id']
data['entityType'] = entity_type
for cust_attr in custom_attributes:
key = cust_attr['key']
if cust_attr['entity_type'].lower() in ['asset']:
data[key] = entity['custom_attributes'][key]
elif cust_attr['entity_type'].lower() in ['show'] and entity_type.lower() == 'project':
data[key] = entity['custom_attributes'][key]
elif cust_attr['entity_type'].lower() in ['task'] and entity_type.lower() != 'project':
# Put space between capitals (e.g. 'AssetBuild' -> 'Asset Build')
entity_type_full = re.sub(r"(\w)([A-Z])", r"\1 \2", entity_type)
# Get object id of entity type
ent_obj_type_id = session.query('ObjectType where name is "{}"'.format(entity_type_full)).one()['id']
if cust_attr['object_type_id'] == ent_obj_type_id:
data[key] = entity['custom_attributes'][key]
if entity_type in ['Project']:
data['code'] = entity['name']
return data
# Get info for 'Data' in Avalon DB
tasks = []
for child in entity['children']:
if child.entity_type in ['Task']:
tasks.append(child['name'])
# Get list of parents without project
parents = []
folderStruct = []
for i in range(1, len(entity['link'])-1):
parEnt = session.get(entity['link'][i]['type'], entity['link'][i]['id'])
parName = parEnt['name']
folderStruct.append(parName)
if i > 1:
parents.append(parEnt)
parentId = None
for parent in parents:
parentId = io.find_one({'type': 'asset', 'name': parName})['_id']
if parent['parent'].entity_type != 'project' and parentId is None:
parent.importToAvalon(session, parent)
parentId = io.find_one({'type': 'asset', 'name': parName})['_id']
hierarchy = os.path.sep.join(folderStruct)
data['visualParent'] = parentId
data['parents'] = folderStruct
data['tasks'] = tasks
data['hierarchy'] = hierarchy
return data
def avalon_check_name(entity, inSchema=None):
ValidationError = jsonschema.ValidationError
alright = True
name = entity['name']
if " " in name:
alright = False
data = {}
data['data'] = {}
data['type'] = 'asset'
schema = "avalon-core:asset-2.0"
# TODO have project any REGEX check?
if entity.entity_type in ['Project']:
# data['type'] = 'project'
name = entity['full_name']
# schema = get_avalon_project_template_schema()['schema']
# elif entity.entity_type in ['AssetBuild','Library']:
# data['silo'] = 'Assets'
# else:
# data['silo'] = 'Film'
data['silo'] = 'Film'
if inSchema is not None:
schema = inSchema
data['schema'] = schema
data['name'] = name
try:
avalon.schema.validate(data)
except ValidationError:
alright = False
if alright is False:
raise ValueError("{} includes unsupported symbols like 'dash' or 'space'".format(name))
def get_apps(entity):
""" Get apps from project
Requirements:
'Entity' MUST be object of ftrack entity with entity_type 'Project'
Checking if app from ftrack is available in Templates/bin/{app_name}.toml
Returns:
Array with dictionaries with app Name and Label
"""
apps = []
for app in entity['custom_attributes']['applications']:
try:
app_config = {}
app_config['name'] = app
app_config['label'] = toml.load(avalon.lib.which_app(app))['label']
apps.append(app_config)
except Exception as e:
log.warning('Error with application {0} - {1}'.format(app, e))
return apps
def get_config(entity):
config = {}
config['schema'] = lib.get_avalon_project_config_schema()
config['tasks'] = [{'name': ''}]
config['apps'] = get_apps(entity)
config['template'] = lib.get_avalon_project_template()
return config
def checkRegex():
# _handle_result -> would be solution?

View file

@ -9,6 +9,7 @@ from .vendor.pather.error import ParseError
import avalon.io as io
import avalon.api
import avalon
log = logging.getLogger(__name__)
@ -335,3 +336,31 @@ def get_asset_data(asset=None):
data = document.get("data", {})
return data
def get_avalon_project_config_schema():
schema = 'avalon-core:config-1.0'
return schema
def get_avalon_project_template_schema():
schema = {"schema": "avalon-core:inventory-1.0"}
return schema
def get_avalon_project_template():
"""Get avalon template
Returns:
dictionary with templates
"""
from app.api import Templates
template = Templates(type=["anatomy"])
proj_template = {}
proj_template['workfile'] = '{asset[name]}_{task[name]}_v{version:0>3}<_{comment}>'
proj_template['work'] = '{root}/{project}/{hierarchy}/{asset}/work/{task}'
proj_template['publish'] = '{root}/{project}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version}/{projectcode}_{asset}_{subset}_v{version}.{representation}'
# TODO this down should work but it can't be in default.toml:
# - Raises error when App (e.g. Nuke) is started
# proj_template['workfile'] = template.anatomy.avalon.workfile
# proj_template['work'] = template.anatomy.avalon.work
# proj_template['publish'] = template.anatomy.avalon.publish
return proj_template

View file

@ -1,8 +1,29 @@
import os
import sys
from avalon import api as avalon
from pyblish import api as pyblish
from .. import api
from pype.nuke import menu
from .lib import (
create_write_node
)
import nuke
# removing logger handler created in avalon_core
for name, handler in [(handler.get_name(), handler)
for handler in api.Logger.logging.root.handlers[:]]:
if "pype" not in str(name).lower():
api.Logger.logging.root.removeHandler(handler)
log = api.Logger.getLogger(__name__, "nuke")
AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
@ -12,9 +33,76 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "nuke", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "nuke", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "nuke", "inventory")
self = sys.modules[__name__]
self.nLogger = None
class NukeHandler(api.Logger.logging.Handler):
'''
Nuke Handler - emits logs into nuke's script editor.
Warning, error, critical and fatal levels pop up
a message dialog (nuke.message) to alert the user.
'''
def __init__(self):
api.Logger.logging.Handler.__init__(self)
self.set_name("Pype_Nuke_Handler")
def emit(self, record):
# Formatted message:
msg = self.format(record)
if record.levelname.lower() in [
"warning",
"critical",
"fatal",
"error"
]:
nuke.message(msg)
'''Adding Nuke Logging Handler'''
nuke_handler = NukeHandler()
if nuke_handler.get_name() \
not in [handler.get_name()
for handler in api.Logger.logging.root.handlers[:]]:
api.Logger.logging.getLogger().addHandler(nuke_handler)
if not self.nLogger:
self.nLogger = api.Logger
def reload_config():
"""Attempt to reload pipeline at run-time.
CAUTION: This is primarily for development and debugging purposes.
"""
import importlib
for module in (
"app",
"app.api",
"{}.api".format(AVALON_CONFIG),
"{}.templates".format(AVALON_CONFIG),
"{}.nuke.templates".format(AVALON_CONFIG),
"{}.nuke.menu".format(AVALON_CONFIG)
):
log.info("Reloading module: {}...".format(module))
module = importlib.import_module(module)
try:
reload(module)
except Exception:
importlib.reload(module)
def install():
print("Registering Nuke plug-ins..")
api.fill_avalon_workdir()
reload_config()
log.info("Registering Nuke plug-ins..")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
@ -23,48 +111,55 @@ def install():
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
# Disable all families except for the ones we explicitly want to see
family_states = ["imagesequence",
"camera",
"pointcache"]
family_states = [
"write",
"lifeGroup",
"backdrop",
"imagesequence",
"mov"
"camera",
"pointcache",
]
avalon.data["familiesStateDefault"] = False
avalon.data["familiesStateToggled"] = family_states
# # work files start at app start
# workfiles.show(
# os.environ["AVALON_WORKDIR"]
# )
menu.install()
# load data from templates
api.load_data_from_templates()
def uninstall():
print("Deregistering Nuke plug-ins..")
log.info("Deregistering Nuke plug-ins..")
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
# reset data from templates
api.reset_data_from_templates()
def on_pyblish_instance_toggled(instance, new_value, old_value):
"""Toggle saver tool passthrough states on instance toggles."""
from avalon.nuke import viewer_update_and_undo_stop, add_publish_knob, log
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node passthrough states on instance toggles."""
log.info("instance toggle: {}, old_value: {}, new_value: {}".format(
instance, old_value, new_value))
writes = [n for n in instance if
n.Class() == "Write"]
if not writes:
return
from avalon.nuke import (
viewer_update_and_undo_stop,
add_publish_knob
)
# Whether instances should be passthrough based on new value
passthrough = not new_value
with viewer_update_and_undo_stop():
for n in writes:
try:
n["publish"].value()
except ValueError:
n = add_publish_knob(n)
log.info(" `Publish` knob was added to write node..")
current = n["publish"].value()
if current != passthrough:
n["publish"].setValue(passthrough)
with viewer_update_and_undo_stop():
n = instance[0]
try:
n["publish"].value()
except ValueError:
n = add_publish_knob(n)
log.info(" `Publish` knob was added to write node..")
n["publish"].setValue(new_value)

View file

@ -1,14 +1,309 @@
import sys
from collections import OrderedDict
from pprint import pprint
from avalon.vendor.Qt import QtGui
from avalon import api, io
import avalon.nuke
import pype.api as pype
import nuke
log = pype.Logger.getLogger(__name__, "nuke")
self = sys.modules[__name__]
self._project = None
def format_anatomy(data):
from .templates import (
get_anatomy
)
file = script_name()
anatomy = get_anatomy()
# TODO: perhaps should be in try!
padding = anatomy.render.padding
data.update({
"hierarchy": pype.get_hiearchy(),
"frame": "#"*padding,
"VERSION": pype.get_version_from_workfile(file)
})
# log.info("format_anatomy:anatomy: {}".format(anatomy))
return anatomy.format(data)
def script_name():
return nuke.root().knob('name').value()
def create_write_node(name, data):
from .templates import (
get_dataflow,
get_colorspace
)
nuke_dataflow_writes = get_dataflow(**data)
nuke_colorspace_writes = get_colorspace(**data)
try:
anatomy_filled = format_anatomy({
"subset": data["avalon"]["subset"],
"asset": data["avalon"]["asset"],
"task": pype.get_task(),
"family": data["avalon"]["family"],
"project": {"name": pype.get_project_name(),
"code": pype.get_project_code()},
"representation": nuke_dataflow_writes.file_type,
})
except Exception as e:
log.error("problem with resolving anatomy template: {}".format(e))
raise
log.debug("anatomy_filled.render: {}".format(anatomy_filled.render))
_data = OrderedDict({
"file": str(anatomy_filled.render.path).replace("\\", "/")
})
# adding dataflow template
{_data.update({k: v})
for k, v in nuke_dataflow_writes.items()
if k not in ["id", "previous"]}
# adding colorspace template
{_data.update({k: v})
for k, v in nuke_colorspace_writes.items()}
_data = avalon.nuke.lib.fix_data_for_node_create(_data)
log.debug(_data)
_data["frame_range"] = data.get("frame_range", None)
instance = avalon.nuke.lib.add_write_node(
name,
**_data
)
instance = avalon.nuke.lib.imprint(instance, data["avalon"])
add_rendering_knobs(instance)
return instance
def add_rendering_knobs(node):
if "render" not in node.knobs():
knob = nuke.Boolean_Knob("render", "Render")
knob.setFlag(0x1000)
knob.setValue(False)
node.addKnob(knob)
if "render_farm" not in node.knobs():
knob = nuke.Boolean_Knob("render_farm", "Render on Farm")
knob.setValue(False)
node.addKnob(knob)
return node
def set_viewers_colorspace(viewer):
assert isinstance(viewer, dict), log.error(
"set_viewers_colorspace(): argument should be dictionary")
filter_knobs = [
"viewerProcess",
"wipe_position"
]
viewers = [n for n in nuke.allNodes() if n.Class() == 'Viewer']
erased_viewers = []
for v in viewers:
v['viewerProcess'].setValue(str(viewer.viewerProcess))
if str(viewer.viewerProcess) not in v['viewerProcess'].value():
copy_inputs = v.dependencies()
copy_knobs = {k: v[k].value() for k in v.knobs()
if k not in filter_knobs}
pprint(copy_knobs)
# delete viewer with wrong settings
erased_viewers.append(v['name'].value())
nuke.delete(v)
# create new viewer
nv = nuke.createNode("Viewer")
# connect to original inputs
for i, n in enumerate(copy_inputs):
nv.setInput(i, n)
# set coppied knobs
for k, v in copy_knobs.items():
print(k, v)
nv[k].setValue(v)
# set viewerProcess
nv['viewerProcess'].setValue(str(viewer.viewerProcess))
if erased_viewers:
log.warning(
"Attention! Viewer nodes {} were erased."
"It had wrong color profile".format(erased_viewers))
def set_root_colorspace(root_dict):
assert isinstance(root_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
for knob, value in root_dict.items():
if nuke.root()[knob].value() not in value:
nuke.root()[knob].setValue(str(value))
log.info("nuke.root()['{}'] changed to: {}".format(knob, value))
def set_writes_colorspace(write_dict):
assert isinstance(write_dict, dict), log.error(
"set_root_colorspace(): argument should be dictionary")
log.info("set_writes_colorspace(): {}".format(write_dict))
def set_colorspace():
from pype import api as pype
nuke_colorspace = getattr(pype.Colorspace, "nuke", None)
try:
set_root_colorspace(nuke_colorspace.root)
except AttributeError:
log.error(
"set_colorspace(): missing `root` settings in template")
try:
set_viewers_colorspace(nuke_colorspace.viewer)
except AttributeError:
log.error(
"set_colorspace(): missing `viewer` settings in template")
try:
set_writes_colorspace(nuke_colorspace.write)
except AttributeError:
log.error(
"set_colorspace(): missing `write` settings in template")
try:
for key in nuke_colorspace:
log.info("{}".format(key))
except TypeError:
log.error("Nuke is not in templates! \n\n\n"
"contact your supervisor!")
def get_avalon_knob_data(node):
import toml
try:
data = toml.loads(node['avalon'].value())
except Exception:
return None
return data
def reset_resolution():
"""Set resolution to project resolution."""
log.info("Reseting resolution")
project = io.find_one({"type": "project"})
asset = api.Session["AVALON_ASSET"]
asset = io.find_one({"name": asset, "type": "asset"})
try:
width = asset["data"].get("resolution_width", 1920)
height = asset["data"].get("resolution_height", 1080)
pixel_aspect = asset["data"].get("pixel_aspect", 1)
bbox = asset["data"].get("crop", "0.0.1920.1080")
try:
x, y, r, t = bbox.split(".")
except Exception as e:
x = 0
y = 0
r = width
t = height
log.error("{}: {} \nFormat:Crop need to be set with dots, example: "
"0.0.1920.1080, /nSetting to default".format(__name__, e))
except KeyError:
log.warning(
"No resolution information found for \"{0}\".".format(
project["name"]
)
)
return
used_formats = list()
for f in nuke.formats():
if project["name"] in str(f.name()):
used_formats.append(f)
else:
format_name = project["name"] + "_1"
crnt_fmt_str = ""
if used_formats:
check_format = used_formats[-1]
format_name = "{}_{}".format(
project["name"],
int(used_formats[-1].name()[-1])+1
)
log.info(
"Format exists: {}. "
"Will create new: {}...".format(
used_formats[-1].name(),
format_name)
)
crnt_fmt_kargs = {
"width": (check_format.width()),
"height": (check_format.height()),
"x": int(check_format.x()),
"y": int(check_format.y()),
"r": int(check_format.r()),
"t": int(check_format.t()),
"pixel_aspect": float(check_format.pixelAspect())
}
crnt_fmt_str = make_format_string(**crnt_fmt_kargs)
log.info("crnt_fmt_str: {}".format(crnt_fmt_str))
new_fmt_kargs = {
"width": int(width),
"height": int(height),
"x": int(x),
"y": int(y),
"r": int(r),
"t": int(t),
"pixel_aspect": float(pixel_aspect),
"project_name": format_name
}
new_fmt_str = make_format_string(**new_fmt_kargs)
log.info("new_fmt_str: {}".format(new_fmt_str))
if new_fmt_str not in crnt_fmt_str:
make_format(frm_str=new_fmt_str,
project_name=new_fmt_kargs["project_name"])
log.info("Format is set")
def make_format_string(**args):
format_str = (
"{width} "
"{height} "
"{x} "
"{y} "
"{r} "
"{t} "
"{pixel_aspect:.2f}".format(**args)
)
return format_str
def make_format(**args):
log.info("Format does't exist, will create: \n{}".format(args))
nuke.addFormat("{frm_str} "
"{project_name}".format(**args))
nuke.root()["format"].setValue("{project_name}".format(**args))
# TODO: the functions below are WIP and need to be checked where they are used
# ------------------------------------
def update_frame_range(start, end, root=None):
"""Set Nuke script start and end frame range

21
pype/nuke/menu.py Normal file
View file

@ -0,0 +1,21 @@
import nuke
from avalon.api import Session
from pype.nuke import lib
def install():
menubar = nuke.menu("Nuke")
menu = menubar.findItem(Session["AVALON_LABEL"])
# replace reset resolution from avalon core to pype's
name = "Reset Resolution"
rm_item = [(i, item)
for i, item in enumerate(menu.items())
if name in item.name()][0]
menu.removeItem(rm_item[1].name())
menu.addCommand(rm_item[1].name(), lib.reset_resolution, index=rm_item[0])
# add colorspace menu item
menu.addCommand("Set colorspace...", lib.set_colorspace, index=rm_item[0]+1)

41
pype/nuke/templates.py Normal file
View file

@ -0,0 +1,41 @@
from pype import api as pype
log = pype.Logger.getLogger(__name__, "nuke")
def get_anatomy(**kwarg):
return pype.Anatomy
def get_dataflow(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("nuke.templates.get_dataflow():"
"Missing mandatory kwargs `host`, `cls`")
nuke_dataflow = getattr(pype.Dataflow, str(host), None)
nuke_dataflow_node = getattr(nuke_dataflow.nodes, str(cls), None)
if preset:
nuke_dataflow_node = getattr(nuke_dataflow_node, str(preset), None)
log.info("Dataflow: {}".format(nuke_dataflow_node))
return nuke_dataflow_node
def get_colorspace(**kwarg):
log.info(kwarg)
host = kwarg.get("host", "nuke")
cls = kwarg.get("class", None)
preset = kwarg.get("preset", None)
assert any([host, cls]), log.error("nuke.templates.get_colorspace():"
"Missing mandatory kwargs `host`, `cls`")
nuke_colorspace = getattr(pype.Colorspace, str(host), None)
nuke_colorspace_node = getattr(nuke_colorspace, str(cls), None)
if preset:
nuke_colorspace_node = getattr(nuke_colorspace_node, str(preset), None)
log.info("Colorspace: {}".format(nuke_colorspace_node))
return nuke_colorspace_node

View file

@ -1,5 +1,6 @@
import pyblish.api
import os
import clique
class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
@ -20,7 +21,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'rig': 'rig',
'setdress': 'setdress',
'pointcache': 'cache',
'review': 'mov'}
'review': 'mov',
'write': 'img',
'render': 'render'}
def process(self, instance):
@ -36,20 +39,25 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
componentList = []
transfers = instance.data["transfers"]
dst_list = instance.data['destination_list']
ft_session = instance.context.data["ftrackSession"]
location = ft_session.query(
'Location where name is "ftrack.unmanaged"').one()
self.log.debug('location {}'.format(location))
for src, dest in transfers:
filename, ext = os.path.splitext(src)
self.log.debug('source filename: ' + filename)
self.log.debug('source ext: ' + ext)
for file in instance.data['destination_list']:
self.log.debug('file {}'.format(file))
for file in dst_list:
filename, ext = os.path.splitext(file)
self.log.debug('dest ext: ' + ext)
componentList.append({"assettype_data": {
"short": asset_type,
},
"asset_data": {
"name": instance.data["subset"],
},
"assetversion_data": {
"version": version_number,
@ -57,7 +65,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"component_data": {
"name": ext[1:], # Default component name is "main".
},
"component_path": dest,
"component_path": file,
'component_location': location,
"component_overwrite": False,
}

View file

@ -18,7 +18,7 @@ def open(filepath):
class PlayImageSequence(api.Loader):
"""Open Image Sequence with system default"""
families = ["imagesequence"]
families = ["write"]
representations = ["*"]
label = "Play sequence"

View file

@ -1,7 +1,7 @@
import pyblish.api
class CollectColorbleedComment(pyblish.api.ContextPlugin):
class CollectComment(pyblish.api.ContextPlugin):
"""This plug-ins displays the comment dialog box per default"""
label = "Collect Comment"

View file

@ -18,6 +18,3 @@ class CollectTemplates(pyblish.api.ContextPlugin):
type=["anatomy"]
)
context.data['anatomy'] = templates.anatomy
for key in templates.anatomy:
self.log.info(str(key) + ": " + str(templates.anatomy[key]))
# return

View file

@ -2,7 +2,7 @@ import pyblish.api
from avalon import api
class CollectMindbenderTime(pyblish.api.ContextPlugin):
class CollectTime(pyblish.api.ContextPlugin):
"""Store global time at the time of publish"""
label = "Collect Current Time"

View file

@ -83,8 +83,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.debug("Establishing staging directory @ %s" % stagingdir)
project = io.find_one({"type": "project"},
projection={"config.template.publish": True})
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": ASSET,
@ -137,12 +136,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# \|________|
#
root = api.registered_root()
# template_data = {"root": root,
# "project": PROJECT,
# "silo": asset['silo'],
# "asset": ASSET,
# "subset": subset["name"],
# "version": version["name"]}
hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
@ -150,7 +143,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_data = {"root": root,
"project": {"name": PROJECT,
"code": "prjX"},
"code": project['data']['code']},
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
@ -164,6 +157,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
for files in instance.data["files"]:
@ -196,6 +190,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
dst = anatomy_filled.publish.path
instance.data["transfers"].append([src, dst])
template = anatomy.publish.path
else:
# Single file
@ -219,13 +214,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
dst = anatomy_filled.publish.path
instance.data["transfers"].append([src, dst])
template = anatomy.publish.path
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {'path': dst},
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
@ -233,7 +229,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"context": {
"root": root,
"project": PROJECT,
"projectcode": "prjX",
"projectcode": project['data']['code'],
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
@ -243,6 +240,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))

View file

@ -125,7 +125,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
hosts = ["fusion", "maya", "nuke"]
families = [
"saver.deadline",
"render.deadline",
"renderlayer",
"imagesequence"
]

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,252 @@
import os
import contextlib
from avalon import api
import avalon.io as io
from avalon.nuke import log
import nuke
@contextlib.contextmanager
def preserve_inputs(node, knobs):
"""Preserve the node's inputs after context"""
values = {}
for name in knobs:
try:
knob_value = node[name].value()
values[name] = knob_value
except ValueError:
log.warning("missing knob {} in node {}"
"{}".format(name, node['name'].value()))
try:
yield
finally:
for name, value in values.items():
node[name].setValue(value)
@contextlib.contextmanager
def preserve_trim(node):
"""Preserve the relative trim of the Loader tool.
This tries to preserve the loader's trim (trim in and trim out) after
the context by reapplying the "amount" it trims on the clip's length at
start and end.
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
start_at_frame = None
offset_frame = None
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() == "offset":
offset_frame = node['frame'].value()
try:
yield
finally:
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
log.info("start frame of reader was set to"
"{}".format(script_start))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str((script_start + offset_frame)))
log.info("start frame of reader was set to"
"{}".format(script_start))
def loader_shift(node, frame, relative=True):
"""Shift global in time by i preserving duration
This moves the loader by i frames preserving global duration. When relative
is False it will shift the global in to the start frame.
Args:
loader (tool): The fusion loader tool.
frame (int): The amount of frames to move.
relative (bool): When True the shift is relative, else the shift will
change the global in to frame.
Returns:
int: The resulting relative frame change (how much it moved)
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() is "offset":
offset_frame = node['frame'].value()
if relative:
shift = frame
else:
if start_at_frame:
shift = frame
if offset_frame:
shift = frame + offset_frame
# Shifting global in will try to automatically compensate for the change
# in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
# input values to "just shift" the clip
with preserve_inputs(node, knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
# GlobalIn cannot be set past GlobalOut or vice versa
# so we must apply them in the order of the shift.
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start + shift))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str(shift))
return int(shift)
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["write"]
representations = ["*"]
label = "Load sequence"
order = -10
icon = "code-fork"
color = "orange"
def load(self, context, name, namespace, data):
from avalon.nuke import (
containerise,
ls_img_sequence,
viewer_update_and_undo_stop
)
log.info("here i am")
# Fallback to asset name when namespace is None
if namespace is None:
namespace = context['asset']['name']
# Use the first file for now
# TODO: fix path fname
file = ls_img_sequence(os.path.dirname(self.fname), one=True)
# Create the Loader with the filename path set
with viewer_update_and_undo_stop():
# TODO: it might be universal read to img/geo/camera
r = nuke.createNode(
"Read",
"name {}".format(self.name)) # TODO: does self.name exist?
r["file"].setValue(file['path'])
if len(file['frames']) == 1:
first = file['frames'][0][0]
last = file['frames'][0][1]
r["originfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["last"].setValue(last)
else:
first = file['frames'][0][0]
last = file['frames'][-1][1]
r["originfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["last"].setValue(last)
log.warning("Missing frames in image sequence")
# Set global in point to start frame (if in version.data)
start = context["version"]["data"].get("startFrame", None)
if start is not None:
loader_shift(r, start, relative=False)
containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__)
def switch(self, container, representation):
self.update(container, representation)
def update(self, container, representation):
"""Update the Loader's path
Fusion automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are to its
inputs:
"""
from avalon.nuke import (
viewer_update_and_undo_stop,
ls_img_sequence,
update_container
)
log.info("this i can see")
node = container["_tool"]
# TODO: prepare also for other readers img/geo/camera
assert node.Class() == "Reader", "Must be Reader"
root = api.get_representation_path(representation)
file = ls_img_sequence(os.path.dirname(root), one=True)
# Get start frame from version data
version = io.find_one({"type": "version",
"_id": representation["parent"]})
start = version["data"].get("startFrame")
if start is None:
log.warning("Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})".format(node['name'].value(), representation))
start = 0
with viewer_update_and_undo_stop():
# Update the loader's path whilst preserving some values
with preserve_trim(node):
with preserve_inputs(node,
knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
node["file"] = file["path"]
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
# Update the imprinted representation
update_container(
node,
{"representation": str(representation["_id"])}
)
def remove(self, container):
from avalon.nuke import viewer_update_and_undo_stop
node = container["_tool"]
assert node.Class() == "Reader", "Must be Reader"
with viewer_update_and_undo_stop():
nuke.delete(node)

View file

@ -0,0 +1 @@
# usually used for mattepainting

View file

@ -0,0 +1,47 @@
import pyblish.api
class CollectNukeRenderMode(pyblish.api.InstancePlugin):
# TODO: rewrite docstring to nuke
"""Collect current comp's render Mode
Options:
local
deadline
Note that this value is set for each comp separately. When you save the
comp this information will be stored in that file. If for some reason the
available tool does not visualize which render mode is set for the
current comp, please run the following line in the console (Py2)
comp.GetData("rendermode")
This will return the name of the current render mode as seen above under
Options.
"""
order = pyblish.api.CollectorOrder + 0.4
label = "Collect Render Mode"
hosts = ["nuke"]
families = ["write", "render.local"]
def process(self, instance):
"""Collect all image sequence tools"""
options = ["local", "deadline"]
node = instance[0]
if bool(node["render_local"].getValue()):
rendermode = "local"
else:
rendermode = "deadline"
assert rendermode in options, "Must be supported render mode"
# Append family
instance.data["families"].remove("render")
family = "render.{0}".format(rendermode)
instance.data["families"].append(family)
self.log.info("Render mode: {0}".format(rendermode))

View file

@ -0,0 +1,116 @@
import os
import nuke
import pyblish.api
class Extract(pyblish.api.InstancePlugin):
"""Super class for write and writegeo extractors."""
order = pyblish.api.ExtractorOrder
optional = True
label = "Extract Nuke [super]"
hosts = ["nuke"]
match = pyblish.api.Subset
# targets = ["process.local"]
def execute(self, instance):
# Get frame range
node = instance[0]
first_frame = nuke.root()["first_frame"].value()
last_frame = nuke.root()["last_frame"].value()
if node["use_limit"].value():
first_frame = node["first"].value()
last_frame = node["last"].value()
# Render frames
nuke.execute(node.name(), int(first_frame), int(last_frame))
class ExtractNukeWrite(Extract):
""" Extract output from write nodes. """
families = ["write", "local"]
label = "Extract Write"
def process(self, instance):
self.execute(instance)
# Validate output
for filename in list(instance.data["collection"]):
if not os.path.exists(filename):
instance.data["collection"].remove(filename)
self.log.warning("\"{0}\" didn't render.".format(filename))
class ExtractNukeCache(Extract):
label = "Cache"
families = ["cache", "local"]
def process(self, instance):
self.execute(instance)
# Validate output
msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
assert os.path.exists(instance.data["output_path"]), msg
class ExtractNukeCamera(Extract):
label = "Camera"
families = ["camera", "local"]
def process(self, instance):
node = instance[0]
node["writeGeometries"].setValue(False)
node["writePointClouds"].setValue(False)
node["writeAxes"].setValue(False)
file_path = node["file"].getValue()
node["file"].setValue(instance.data["output_path"])
self.execute(instance)
node["writeGeometries"].setValue(True)
node["writePointClouds"].setValue(True)
node["writeAxes"].setValue(True)
node["file"].setValue(file_path)
# Validate output
msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
assert os.path.exists(instance.data["output_path"]), msg
class ExtractNukeGeometry(Extract):
label = "Geometry"
families = ["geometry", "local"]
def process(self, instance):
node = instance[0]
node["writeCameras"].setValue(False)
node["writePointClouds"].setValue(False)
node["writeAxes"].setValue(False)
file_path = node["file"].getValue()
node["file"].setValue(instance.data["output_path"])
self.execute(instance)
node["writeCameras"].setValue(True)
node["writePointClouds"].setValue(True)
node["writeAxes"].setValue(True)
node["file"].setValue(file_path)
# Validate output
msg = "\"{0}\" didn't render.".format(instance.data["output_path"])
assert os.path.exists(instance.data["output_path"]), msg

View file

@ -0,0 +1,98 @@
import re
import os
import json
import subprocess
import pyblish.api
from pype.action import get_errored_plugins_from_data
def _get_script():
"""Get path to the image sequence script"""
# todo: use a more elegant way to get the python script
try:
from pype.fusion.scripts import publish_filesequence
except Exception:
raise RuntimeError("Expected module 'publish_imagesequence'"
"to be available")
module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"
return module_path
class PublishImageSequence(pyblish.api.InstancePlugin):
"""Publish the generated local image sequences."""
order = pyblish.api.IntegratorOrder
label = "Publish Rendered Image Sequence(s)"
hosts = ["fusion"]
families = ["saver.renderlocal"]
def process(self, instance):
# Skip this plug-in if the ExtractImageSequence failed
errored_plugins = get_errored_plugins_from_data(instance.context)
if any(plugin.__name__ == "FusionRenderLocal" for plugin in
errored_plugins):
raise RuntimeError("Fusion local render failed, "
"publishing images skipped.")
subset = instance.data["subset"]
ext = instance.data["ext"]
# Regex to match resulting renders
regex = "^{subset}.*[0-9]+{ext}+$".format(subset=re.escape(subset),
ext=re.escape(ext))
# The instance has most of the information already stored
metadata = {
"regex": regex,
"startFrame": instance.context.data["startFrame"],
"endFrame": instance.context.data["endFrame"],
"families": ["imagesequence"],
}
# Write metadata and store the path in the instance
output_directory = instance.data["outputDir"]
path = os.path.join(output_directory,
"{}_metadata.json".format(subset))
with open(path, "w") as f:
json.dump(metadata, f)
assert os.path.isfile(path), ("Stored path is not a file for %s"
% instance.data["name"])
# Suppress any subprocess console
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
process = subprocess.Popen(["python", _get_script(),
"--paths", path],
bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
startupinfo=startupinfo)
while True:
output = process.stdout.readline()
# Break when there is no output or a return code has been given
if output == '' and process.poll() is not None:
process.stdout.close()
break
if output:
line = output.strip()
if line.startswith("ERROR"):
self.log.error(line)
else:
self.log.info(line)
if process.returncode != 0:
raise RuntimeError("Process quit with non-zero "
"return code: {}".format(process.returncode))

View file

@ -0,0 +1,147 @@
import os
import json
import getpass
from avalon import api
from avalon.vendor import requests
import pyblish.api
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# TODO: rewrite docstring to nuke
"""Submit current Comp to Deadline
Renders are submitted to a Deadline Web Service as
supplied via the environment variable AVALON_DEADLINE
"""
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["nuke"]
families = ["write", "render.deadline"]
def process(self, instance):
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
"http://localhost:8082")
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
# Collect all saver instances in context that are to be rendered
write_instances = []
for instance in context[:]:
if not self.families[0] in instance.data.get("families"):
# Allow only saver family instances
continue
if not instance.data.get("publish", True):
# Skip inactive instances
continue
self.log.debug(instance.data["name"])
write_instances.append(instance)
if not write_instances:
raise RuntimeError("No instances found for Deadline submittion")
hostVersion = int(context.data["hostVersion"])
filepath = context.data["currentFile"]
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
deadline_user = context.data.get("deadlineUser", getpass.getuser())
# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
# /products/deadline/8.0/1_User%20Manual/manual
# /manual-submission.html#job-info-file-options
payload = {
"JobInfo": {
# Top-level group name
"BatchName": filename,
# Job name, as seen in Monitor
"Name": filename,
# User, as seen in Monitor
"UserName": deadline_user,
# Use a default submission pool for Nuke
"Pool": "nuke",
"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
start=int(instance.data["startFrame"]),
end=int(instance.data["endFrame"])
),
"Comment": comment,
},
"PluginInfo": {
# Input
"FlowFile": filepath,
# Mandatory for Deadline
"Version": str(hostVersion),
# Render in high quality
"HighQuality": True,
# Whether saver output should be checked after rendering
# is complete
"CheckOutput": True,
# Proxy: higher numbers smaller images for faster test renders
# 1 = no proxy quality
"Proxy": 1,
},
# Mandatory for Deadline, may be empty
"AuxFiles": []
}
# Enable going to rendered frames from Deadline Monitor
for index, instance in enumerate(write_instances):
path = instance.data["path"]
folder, filename = os.path.split(path)
payload["JobInfo"]["OutputDirectory%d" % index] = folder
payload["JobInfo"]["OutputFilename%d" % index] = filename
# Include critical variables with submission
keys = [
# TODO: This won't work if the slaves don't have access to
# these paths, such as if slaves are running Linux and the
# submitter is on Windows.
"PYTHONPATH",
"NUKE_PATH"
# "OFX_PLUGIN_PATH",
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(AVALON_DEADLINE)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
# Store the response for dependent job submission plug-ins
for instance in write_instances:
instance.data["deadlineSubmissionJob"] = response.json()

View file

@ -0,0 +1,2 @@
# creates a backdrop which is published as a separate nuke script
# it is versioned by major version
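#
# a minimal sketch of what such a creator could look like, assuming the
# avalon.nuke.Creator base class used by the other creators in this commit;
# the class name and knob handling are illustrative, not the final
# implementation (publishing and versioning wiring is omitted)
import nuke
import avalon.nuke


class CreateBackdrop(avalon.nuke.Creator):
    """Wrap the selected nodes in a backdrop for publishing."""
    name = "Backdrop"
    label = "Create Backdrop"
    hosts = ["nuke"]
    family = "backdrop"
    icon = "object-group"

    def process(self):
        with avalon.nuke.viewer_update_and_undo_stop():
            # BackdropNode frames the current selection in the node graph
            backdrop = nuke.createNode("BackdropNode")
            backdrop["label"].setValue(self.data["subset"])
            return backdrop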

View file

@ -0,0 +1,3 @@
# creates a vanilla camera if no camera is selected
# if a camera is selected it will be converted into a containerized object
# it is major-versioned on publish
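#
# a minimal sketch under the same assumptions as the other creators in this
# commit; the containerization step is only indicated, not implemented
import nuke
import avalon.nuke


class CreateCamera(avalon.nuke.Creator):
    """Create a vanilla camera, or reuse the selected one."""
    name = "Camera"
    label = "Create Camera"
    hosts = ["nuke"]
    family = "camera"
    icon = "video-camera"

    def process(self):
        selection = nuke.selectedNodes("Camera2")
        with avalon.nuke.viewer_update_and_undo_stop():
            if selection:
                # selected camera would be converted into a container
                camera = selection[0]
            else:
                # no selection: create a vanilla camera
                camera = nuke.createNode("Camera2")
            return camera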

View file

@ -0,0 +1,8 @@
# create a publishable read node, usually used for enabling version tracking
# also useful for sharing across shots or assets
# if read nodes are selected it will convert them to containers
# if no read node is selected it will create one and offer a browser to the shot resource folder
# type movie > mov or imagesequence
# type still > mattepaint .psd, .tif, .png
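#
# a minimal sketch, assuming avalon.nuke.Creator and the imprint helper used
# by create_write_node elsewhere in this commit; names are illustrative only
import nuke
import avalon.nuke


class CreateRead(avalon.nuke.Creator):
    """Make Read nodes publishable for version tracking."""
    name = "Read"
    label = "Create Read"
    hosts = ["nuke"]
    family = "read"
    icon = "film"

    def process(self):
        reads = [n for n in nuke.selectedNodes() if n.Class() == "Read"]
        with avalon.nuke.viewer_update_and_undo_stop():
            if not reads:
                # no selection: create a Read node; the artist then browses
                # to the shot resource folder manually
                reads = [nuke.createNode("Read")]
            for read in reads:
                # imprint avalon metadata so the node becomes a container
                avalon.nuke.lib.imprint(read, self.data)
            return reads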

View file

@ -0,0 +1,17 @@
# type: render
# if there is no render type node in the script, the first one carries [master] in its name to mark the main script renderer
# colorspace setting from templates
# dataflow setting from templates
# type: mask_render
# created with a shuffle gizmo for RGB separation into a DaVinci matte
# colorspace setting from templates
# dataflow setting from templates
# type: prerender
# backdrop with write and read
# colorspace setting from templates
# dataflow setting from templates
# type: geo
# dataflow setting from templates
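#
# a hypothetical illustration of how a creator could resolve the settings
# above through the accessors added in pype/nuke/templates.py; the preset
# names mirror the comments in this file and are assumptions, not a
# confirmed preset list
from pype.nuke.templates import get_dataflow, get_colorspace

for preset in ["render", "mask_render", "prerender"]:
    # "class" is a reserved word in Python, hence the dict unpacking
    kwargs = {"host": "nuke", "class": "write", "preset": preset}
    dataflow = get_dataflow(**kwargs)
    colorspace = get_colorspace(**kwargs)

# the geo type only carries dataflow settings
geo_dataflow = get_dataflow(**{"host": "nuke", "class": "write", "preset": "geo"})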

View file

@ -0,0 +1,149 @@
from collections import OrderedDict
import avalon.api
import avalon.nuke
from pype.nuke import (
create_write_node
)
from pype import api as pype
import nuke
log = pype.Logger.getLogger(__name__, "nuke")
def subset_to_families(subset, family, families):
subset_sufx = str(subset).replace(family, "")
new_subset = families + subset_sufx
return "{}.{}".format(family, new_subset)
class CreateWriteRender(avalon.nuke.Creator):
# change this to template preset
preset = "render"
name = "WriteRender"
label = "Create Write Render"
hosts = ["nuke"]
family = "{}_write".format(preset)
families = preset
icon = "sign-out"
def __init__(self, *args, **kwargs):
super(CreateWriteRender, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family.split("_")[1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
if k not in data.keys()}
self.data = data
def process(self):
self.name = self.data["subset"]
family = self.family.split("_")[0]
node = self.family.split("_")[1]
instance = nuke.toNode(self.data["subset"])
if not instance:
write_data = {
"class": node,
"preset": family,
"avalon": self.data
}
create_write_node(self.data["subset"], write_data)
return
class CreateWritePrerender(avalon.nuke.Creator):
# change this to template preset
preset = "prerender"
name = "WritePrerender"
label = "Create Write Prerender"
hosts = ["nuke"]
family = "{}_write".format(preset)
families = preset
icon = "sign-out"
def __init__(self, *args, **kwargs):
super(CreateWritePrerender, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family.split("_")[1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
if k not in data.keys()}
self.data = data
def process(self):
self.name = self.data["subset"]
instance = nuke.toNode(self.data["subset"])
family = self.family.split("_")[0]
node = self.family.split("_")[1]
if not instance:
write_data = {
"class": node,
"preset": family,
"avalon": self.data
}
create_write_node(self.data["subset"], write_data)
return
class CreateWriteStill(avalon.nuke.Creator):
# change this to template preset
preset = "still"
name = "WriteStill"
label = "Create Write Still"
hosts = ["nuke"]
family = "{}_write".format(preset)
families = preset
icon = "image"
def __init__(self, *args, **kwargs):
super(CreateWriteStill, self).__init__(*args, **kwargs)
data = OrderedDict()
data["family"] = self.family.split("_")[1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
if k not in data.keys()}
self.data = data
def process(self):
self.name = self.data["subset"]
instance = nuke.toNode(self.data["subset"])
family = self.family.split("_")[0]
node = self.family.split("_")[1]
if not instance:
write_data = {
"frame_range": [nuke.frame(), nuke.frame()],
"class": node,
"preset": family,
"avalon": self.data
}
nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
create_write_node(self.data["subset"], write_data)
return

View file

@ -1,48 +0,0 @@
import os
import avalon.api
import avalon.nuke
import nuke
class CrateWriteExr(avalon.api.Creator):
name = "Write_exr"
label = "Create Write: exr"
hosts = ["nuke"]
family = "write"
icon = "sign-out"
# def __init__(self, *args, **kwargs):
# super(CrateWriteExr, self).__init__(*args, **kwargs)
# self.data.setdefault("subset", "this")
def process(self):
# nuke = getattr(sys.modules["__main__"], "nuke", None)
data = {}
ext = "exr"
# todo: improve method of getting current environment
# todo: pref avalon.Session over os.environ
workdir = os.path.normpath(os.environ["AVALON_WORKDIR"])
filename = "{}.####.exr".format(self.name)
filepath = os.path.join(
workdir,
"render",
ext,
filename
).replace("\\", "/")
with avalon.nuke.viewer_update_and_undo_stop():
w = nuke.createNode(
"Write",
"name {}".format(self.name))
# w.knob('colorspace').setValue()
w.knob('file').setValue(filepath)
w.knob('file_type').setValue(ext)
w.knob('datatype').setValue("16 bit half")
w.knob('compression').setValue("Zip (1 scanline)")
w.knob('create_directories').setValue(True)
w.knob('autocrop').setValue(True)
return data

View file

@ -1,7 +1,7 @@
from avalon import api
class NukeSelectContainers(api.InventoryAction):
class SelectContainers(api.InventoryAction):
label = "Select Containers"
icon = "mouse-pointer"

View file

@ -3,14 +3,17 @@
"""
from avalon import api
from pype.api import Logger
log = Logger.getLogger(__name__, "nuke")
class NukeSetFrameRangeLoader(api.Loader):
class SetFrameRangeLoader(api.Loader):
"""Specific loader of Alembic for the avalon.animation family"""
families = ["animation",
"camera",
"imagesequence",
"write",
"yeticache",
"pointcache"]
representations = ["*"]
@ -30,20 +33,21 @@ class NukeSetFrameRangeLoader(api.Loader):
start = version_data.get("startFrame", None)
end = version_data.get("endFrame", None)
log.info("start: {}, end: {}".format(start, end))
if start is None or end is None:
print("Skipping setting frame range because start or "
"end frame data is missing..")
log.info("Skipping setting frame range because start or "
"end frame data is missing..")
return
lib.update_frame_range(start, end)
class NukeSetFrameRangeWithHandlesLoader(api.Loader):
class SetFrameRangeWithHandlesLoader(api.Loader):
"""Specific loader of Alembic for the avalon.animation family"""
families = ["animation",
"camera",
"imagesequence",
"write",
"yeticache",
"pointcache"]
representations = ["*"]

View file

@ -4,28 +4,11 @@ import contextlib
from avalon import api
import avalon.io as io
from avalon.nuke import log
import nuke
@contextlib.contextmanager
def preserve_inputs(node, knobs):
"""Preserve the node's inputs after context"""
values = {}
for name in knobs:
try:
knob_value = node[name].vaule()
values[name] = knob_value
except ValueError:
log.warning("missing knob {} in node {}"
"{}".format(name, node['name'].value()))
try:
yield
finally:
for name, value in values.items():
node[name].setValue(value)
from pype.api import Logger
log = Logger.getLogger(__name__, "nuke")
@contextlib.contextmanager
@ -38,7 +21,7 @@ def preserve_trim(node):
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
script_start = nuke.root()["first_frame"].value()
start_at_frame = None
offset_frame = None
@ -80,48 +63,19 @@ def loader_shift(node, frame, relative=True):
"""
# working script frame range
script_start = nuke.root()["start_frame"].value()
if node['frame_mode'].value() == "start at":
start_at_frame = node['frame'].value()
if node['frame_mode'].value() is "offset":
offset_frame = node['frame'].value()
script_start = nuke.root()["first_frame"].value()
if relative:
shift = frame
else:
if start_at_frame:
shift = frame
if offset_frame:
shift = frame + offset_frame
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start))
# Shifting global in will try to automatically compensate for the change
# in the "ClipTimeStart" and "HoldFirstFrame" inputs, so we preserve those
# input values to "just shift" the clip
with preserve_inputs(node, knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
# GlobalIn cannot be set past GlobalOut or vice versa
# so we must apply them in the order of the shift.
if start_at_frame:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(script_start + shift))
if offset_frame:
node['frame_mode'].setValue("offset")
node['frame'].setValue(str(shift))
return int(shift)
return int(script_start)
class NukeLoadSequence(api.Loader):
class LoadSequence(api.Loader):
"""Load image sequence into Nuke"""
families = ["imagesequence"]
families = ["write"]
representations = ["*"]
label = "Load sequence"
@ -130,12 +84,13 @@ class NukeLoadSequence(api.Loader):
color = "orange"
def load(self, context, name, namespace, data):
from avalon.nuke import (
containerise,
ls_img_sequence,
viewer_update_and_undo_stop
)
for k, v in context.items():
log.info("key: `{}`, value: {}\n".format(k, v))
# Fallback to asset name when namespace is None
if namespace is None:
@ -144,40 +99,56 @@ class NukeLoadSequence(api.Loader):
# Use the first file for now
# TODO: fix path fname
file = ls_img_sequence(os.path.dirname(self.fname), one=True)
log.info("file: {}\n".format(file))
read_name = "Read_" + context["representation"]["context"]["subset"]
# Create the Loader with the filename path set
with viewer_update_and_undo_stop():
# TODO: it might be universal read to img/geo/camera
r = nuke.createNode(
"Read",
"name {}".format(self.name)) # TODO: does self.name exist?
"name {}".format(read_name))
r["file"].setValue(file['path'])
if len(file['frames']) == 1:
first = file['frames'][0][0]
last = file['frames'][0][1]
r["originfirst"].setValue(first)
r["origfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["origlast"].setValue(last)
r["last"].setValue(last)
else:
first = file['frames'][0][0]
last = file['frames'][-1][1]
r["originfirst"].setValue(first)
r["origfirst"].setValue(first)
r["first"].setValue(first)
r["originlast"].setValue(last)
r["origlast"].setValue(last)
r["last"].setValue(last)
log.warning("Missing frames in image sequence")
# Set colorspace defined in version data
colorspace = context["version"]["data"].get("colorspace", None)
if colorspace is not None:
r["colorspace"].setValue(str(colorspace))
# Set global in point to start frame (if in version.data)
start = context["version"]["data"].get("startFrame", None)
if start is not None:
loader_shift(r, start, relative=False)
loader_shift(r, start, relative=True)
# add additional metadata from the version to imprint to Avalon knob
add_keys = ["startFrame", "endFrame", "handles",
"source", "colorspace", "author", "fps"]
data_imprint = {}
for k in add_keys:
data_imprint.update({k: context["version"]['data'][k]})
containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__)
loader=self.__class__.__name__,
data=data_imprint)
def switch(self, container, representation):
self.update(container, representation)
@ -196,7 +167,7 @@ class NukeLoadSequence(api.Loader):
ls_img_sequence,
update_container
)
log.info("this i can see")
node = container["_tool"]
# TODO: prepare also for other readers img/geo/camera
assert node.Class() == "Reader", "Must be Reader"
@ -218,15 +189,7 @@ class NukeLoadSequence(api.Loader):
# Update the loader's path whilst preserving some values
with preserve_trim(node):
with preserve_inputs(node,
knobs=["file",
"first",
"last",
"originfirst",
"originlast",
"frame_mode",
"frame"]):
node["file"] = file["path"]
node["file"] = file["path"]
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)

View file

@ -1,18 +1,18 @@
import pyblish.api
class CollectCurrentFile(pyblish.api.ContextPlugin):
class SelectCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.1
label = "Collect Current File"
order = pyblish.api.CollectorOrder
hosts = ["nuke"]
families = ["workfile"]
def process(self, context):
import os
import nuke
current_file = nuke.root().name()
normalised = os.path.normpath(current_file)
context.data["current_file"] = normalised
context.data["currentFile"] = normalised

View file

@ -0,0 +1,65 @@
import os
import nuke
import pyblish.api
from pype.nuke.lib import get_avalon_knob_data
@pyblish.api.log
class CollectNukeInstances(pyblish.api.ContextPlugin):
"""Collect all nodes with Avalon knob."""
order = pyblish.api.CollectorOrder
label = "Collect Instances"
hosts = ["nuke", "nukeassist"]
def process(self, context):
instances = []
# creating instances per write node
for node in nuke.allNodes():
try:
if node["disable"].value():
continue
except Exception:
continue
# get data from avalon knob
avalon_knob_data = get_avalon_knob_data(node)
if not avalon_knob_data:
continue
if avalon_knob_data["id"] != "pyblish.avalon.instance":
continue
subset = avalon_knob_data.get("subset", None) or node["name"].value()
# Create instance
instance = context.create_instance(subset)
instance.add(node)
instance.data.update({
"subset": subset,
"asset": os.environ["AVALON_ASSET"],
"label": node.name(),
"name": node.name(),
"families": [avalon_knob_data["families"]],
"family": avalon_knob_data["family"],
"avalonKnob": avalon_knob_data,
"publish": node.knob('publish')
})
self.log.info("collected instance: {}".format(instance.data))
instances.append(instance)
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
context[:] = sorted(context, key=self.sort_by_family)
self.log.debug("context: {}".format(context))
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@ -0,0 +1,98 @@
import os
import nuke
import pyblish.api
import logging
from avalon import io, api
log = logging.getLogger(__name__)
@pyblish.api.log
class CollectNukeWrites(pyblish.api.ContextPlugin):
"""Collect all write nodes."""
order = pyblish.api.CollectorOrder + 0.1
label = "Collect Writes"
hosts = ["nuke", "nukeassist"]
def process(self, context):
asset_data = io.find_one({"type": "asset",
"name": api.Session["AVALON_ASSET"]})
self.log.debug("asset_data: {}".format(asset_data["data"]))
for instance in context.data["instances"]:
self.log.debug("checking instance: {}".format(instance))
node = instance[0]
if node.Class() != "Write":
continue
# Determine defined file type
ext = node["file_type"].value()
# Determine output type
output_type = "img"
if ext == "mov":
output_type = "mov"
# Get frame range
first_frame = int(nuke.root()["first_frame"].getValue())
last_frame = int(nuke.root()["last_frame"].getValue())
if node["use_limit"].getValue():
first_frame = int(node["first"].getValue())
last_frame = int(node["last"].getValue())
# get path
path = nuke.filename(node)
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# Include start and end render frame in label
name = node.name()
label = "{0} ({1}-{2})".format(
name,
int(first_frame),
int(last_frame)
)
# prerendered frames
if not node["render"].value():
families = "prerendered.frames"
collected_frames = os.listdir(output_dir)
self.log.debug("collected_frames: {}".format(label))
if "files" not in instance.data:
instance.data["files"] = list()
instance.data["files"].append(collected_frames)
instance.data['transfer'] = False
else:
# dealing with local/farm rendering
if node["render_farm"].value():
families = "{}.farm".format(instance.data["avalonKnob"]["families"][0])
else:
families = "{}.local".format(instance.data["avalonKnob"]["families"][0])
self.log.debug("checking for error: {}".format(label))
instance.data.update({
"path": path,
"outputDir": output_dir,
"ext": ext,
"label": label,
"families": [families, 'ftrack'],
"startFrame": first_frame,
"endFrame": last_frame,
"outputType": output_type,
"stagingDir": output_dir,
"colorspace": node["colorspace"].value(),
"handles": int(asset_data["data"].get("handles", 0)),
"step": 1,
"fps": int(nuke.root()['fps'].value())
})
self.log.debug("instance.data: {}".format(instance.data))
self.log.debug("context: {}".format(context))
def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))

View file

@ -20,7 +20,7 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin):
path = instance.data["collection"].format()
if "output_path" in instance.data.keys():
path = instance.data["output_path"]
path = instance.data["path"]
if not path:
return

View file

@ -1,15 +0,0 @@
import nuke
import pyblish.api
class ExtractScriptSave(pyblish.api.InstancePlugin):
""" Saves the script before extraction. """
order = pyblish.api.ExtractorOrder - 0.49
label = "Script Save"
hosts = ["nuke"]
families = ["saver"]
def process(self, instance):
nuke.scriptSave()

View file

@ -0,0 +1,366 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
from avalon import api, io
log = logging.getLogger(__name__)
class IntegrateFrames(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
"""
label = "Integrate Frames"
order = pyblish.api.IntegratorOrder
families = ["prerendered.frames"]
def process(self, instance):
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
LOCATION = api.Session["AVALON_LOCATION"]
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, ("Incomplete instance \"%s\": "
"Missing reference to staging area." % instance)
# extra check if stagingDir actually exists and is available
self.log.debug("Establishing staging directory @ %s" % stagingdir)
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": ASSET,
"parent": project["_id"]})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
subset = self.get_subset(asset, instance)
# get next version
latest_version = io.find_one({"type": "version",
"parent": subset["_id"]},
{"name": True},
sort=[("name", -1)])
next_version = 1
if latest_version is not None:
next_version += latest_version["name"]
self.log.info("Verifying version from assumed destination")
assumed_data = instance.data["assumedTemplateData"]
assumed_version = assumed_data["version"]
if assumed_version != next_version:
raise AttributeError("Assumed version 'v{0:03d}' does not match"
"next version in database "
"('v{1:03d}')".format(assumed_version,
next_version))
self.log.debug("Next version: v{0:03d}".format(next_version))
version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)
self.log.debug("version: {}".format(version))
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
self.log.debug("version_id: {}".format(version_id))
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
self.log.debug("hierarchy: {}".format(hierarchy))
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"task": api.Session["AVALON_TASK"],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"VERSION": version["name"],
"hierarchy": hierarchy}
# template_publish = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
for files in instance.data["files"]:
# Collection
# _______
# |______|\
# | |\|
# | ||
# | ||
# | ||
# |_______|
#
if isinstance(files, list):
collection = files
# Assert that each member has identical suffix
dst_collection = []
for fname in collection:
filename, ext = os.path.splitext(fname)
_, frame = os.path.splitext(filename)
template_data["representation"] = ext[1:]
template_data["frame"] = frame[1:]
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
dst_collection.append(dst)
instance.data["transfers"].append([src, dst])
template = anatomy.render.path
collections, remainder = clique.assemble(dst_collection)
dst = collections[0].format('{head}{padding}{tail}')
else:
# Single file
# _______
# | |\
# | |
# | |
# | |
# |_______|
#
fname = files
assert not os.path.isabs(fname), (
"Given file name is a full path"
)
_, ext = os.path.splitext(fname)
template_data["representation"] = ext[1:]
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
template = anatomy.render.path
instance.data["transfers"].append([src, dst])
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
# for performance reasons.
"context": {
"root": root,
"project": PROJECT,
"projectcode": project['data']['code'],
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": version["name"],
"hierarchy": hierarchy,
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
def integrate(self, instance):
"""Move the files
Through `instance.data["transfers"]`
Args:
instance: the instance to integrate
"""
transfers = instance.data["transfers"]
for src, dest in transfers:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
            dst (str): the destination of the source file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)
def get_subset(self, asset, instance):
subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
_id = io.insert_one({
"schema": "pype:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id
subset = io.find_one({"_id": _id})
return subset
def create_version(self, subset, version_number, locations, data=None):
""" Copy given source to destination
Args:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
Returns:
dict: collection of data to create a version
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]
return {"schema": "pype:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}
def create_version_data(self, context, instance):
"""Create the data collection for the version
Args:
context: the current context
instance: the current instance being published
Returns:
            dict: the version data gathered from the context and instance
"""
families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
if instance_family is not None:
families.append(instance_family)
families += current_families
# create relative source path for DB
relative_path = os.path.relpath(context.data["currentFile"],
api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment")}
# Include optional data if present in
optionals = ["startFrame", "endFrame", "step",
"handles", "colorspace", "fps", "outputDir"]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data.get(key, None)
return version_data
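
The register step only records `[src, dst]` pairs in `instance.data["transfers"]`; the actual file motion happens in `integrate`. A minimal standalone sketch of that contract, with invented paths:

import errno
import os
import shutil

transfers = [
    # hypothetical staging -> publish pair
    ["/tmp/staging/beauty.0001.exr",
     "/projects/demo/publish/render/v001/beauty.0001.exr"],
]
for src, dst in transfers:
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as e:
        if e.errno != errno.EEXIST:  # tolerate existing directories only
            raise
    shutil.copy(src, dst)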

View file

@ -0,0 +1,48 @@
import pyblish.api
import nuke
class NukeRenderLocal(pyblish.api.InstancePlugin):
    """Render the current Nuke script locally.

    Extract the result of write nodes by starting a local Nuke render.
    """
order = pyblish.api.ExtractorOrder
label = "Render Local"
hosts = ["nuke"]
families = ["render.local", "prerender.local", "still.local"]
def process(self, instance):
# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
self.log.debug("instance collected: {}".format(instance.data))
first_frame = instance.data.get("startFrame", None)
last_frame = instance.data.get("endFrame", None)
node_subset_name = instance.data.get("name", None)
self.log.info("Starting render")
self.log.info("Start frame: {}".format(first_frame))
self.log.info("End frame: {}".format(last_frame))
# Render frames
nuke.execute(
node_subset_name,
int(first_frame),
int(last_frame)
)
        # switch to prerendered.frames
instance[0]["render"].setValue(False)
self.log.info('Finished render')
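
`nuke.execute` renders the named node over an inclusive frame range, which is all this extractor ultimately does; a hedged sketch with an invented node name and range, assuming the pipeline-specific "render" knob exists on the node:

import nuke

nuke.execute("Write1", 1001, 1100)  # render frames 1001-1100 of "Write1"
# flip the custom avalon "render" knob off, as the plug-in does above
nuke.toNode("Write1")["render"].setValue(False)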

View file

@ -0,0 +1,15 @@
import nuke
import pyblish.api
class ExtractScriptSave(pyblish.api.Extractor):
"""
"""
label = 'Script Save'
order = pyblish.api.Extractor.order - 0.45
hosts = ['nuke']
def process(self, instance):
self.log.info('saving script')
nuke.scriptSave()

View file

@ -0,0 +1,61 @@
import os
import pyblish.api
import clique
@pyblish.api.log
class RepairCollectionAction(pyblish.api.Action):
label = "Repair"
on = "failed"
icon = "wrench"
def process(self, context, plugin):
        files_remove = [os.path.join(context[0].data["outputDir"], f)
                        for f in context[0].data["files"][0]]
for f in files_remove:
os.remove(f)
self.log.debug("removing file: {}".format(f))
context[0][0]["render"].setValue(True)
self.log.info("Rendering toggled ON")
class ValidateCollection(pyblish.api.InstancePlugin):
""" Validates file output. """
order = pyblish.api.ValidatorOrder
# optional = True
families = ['prerendered.frames']
label = "Check prerendered frames"
hosts = ["nuke"]
actions = [RepairCollectionAction]
def process(self, instance):
self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))
collections, remainder = clique.assemble(*instance.data['files'])
self.log.info('collections: {}'.format(str(collections)))
frame_length = instance.data["endFrame"] \
- instance.data["startFrame"] + 1
        if frame_length != 1:
            assert len(collections) == 1, (
                "There are multiple collections in the folder")
            assert collections[0].is_contiguous(), (
                "Some frames appear to be missing")
        assert not remainder, (
            "There are some extra files in the folder")
        basename, ext = os.path.splitext(list(collections[0])[0])
        assert all(ext == os.path.splitext(name)[1]
                   for name in collections[0]), (
            "Files had varying suffixes")
        assert not any(os.path.isabs(name) for name in collections[0]), (
            "Some file names are absolute")
        self.log.info('frame_length: {}'.format(frame_length))
        self.log.info('len(list(instance.data["files"])): {}'.format(
            len(list(instance.data["files"][0]))))
        assert len(list(instance.data["files"][0])) == frame_length, (
            "{} missing frames. Use repair to render all frames".format(
                __name__))

View file

@ -1,20 +0,0 @@
import os
import pyblish.api
@pyblish.api.log
class ValidatePrerendersOutput(pyblish.api.Validator):
"""Validates that the output directory for the write nodes exists"""
families = ['write.prerender']
hosts = ['nuke']
label = 'Pre-renders output'
def process(self, instance):
path = os.path.dirname(instance[0]['file'].value())
if 'output' not in path:
name = instance[0].name()
msg = 'Output directory for %s is not in an "output" folder.' % name
raise ValueError(msg)

pype/templates.py Normal file
View file

@ -0,0 +1,100 @@
import os
import re
from avalon import io
from app.api import (Templates, Logger, format)
log = Logger.getLogger(__name__,
os.getenv("AVALON_APP", "pype-config"))
def load_data_from_templates():
from . import api
if not any([
api.Dataflow,
api.Anatomy,
api.Colorspace,
api.Metadata
]
):
# base = Templates()
t = Templates(type=["anatomy", "metadata", "dataflow", "colorspace"])
api.Anatomy = t.anatomy
api.Metadata = t.metadata.format()
data = {"metadata": api.Metadata}
api.Dataflow = t.dataflow.format(data)
api.Colorspace = t.colorspace
log.info("Data from templates were Loaded...")
def reset_data_from_templates():
from . import api
api.Dataflow = None
api.Anatomy = None
api.Colorspace = None
api.Metadata = None
log.info("Data from templates were Unloaded...")
def get_version_from_workfile(file):
pattern = re.compile(r"_v([0-9]*)")
try:
v_string = pattern.findall(file)[0]
return v_string
except IndexError:
log.error("templates:get_version_from_workfile:"
"`{}` missing version string."
"Example `v004`".format(file))
def get_project_code():
return io.find_one({"type": "project"})["data"]["code"]
def get_project_name():
project_name = os.getenv("AVALON_PROJECT", None)
    assert project_name, log.error("missing `AVALON_PROJECT` "
                                   "in environment variables")
return project_name
def get_asset():
asset = os.getenv("AVALON_ASSET", None)
    assert asset, log.error("missing `AVALON_ASSET` "
                            "in environment variables")
return asset
def get_task():
task = os.getenv("AVALON_TASK", None)
    assert task, log.error("missing `AVALON_TASK` "
                           "in environment variables")
return task
def get_hiearchy():
hierarchy = io.find_one({
"type": 'asset',
"name": get_asset()}
)['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
return os.path.join(*hierarchy)
def fill_avalon_workdir():
awd = os.getenv("AVALON_WORKDIR", None)
    assert awd, log.error("missing `AVALON_WORKDIR` "
                          "in environment variables")
if "{" not in awd:
return
data = {
"hierarchy": get_hiearchy(),
"task": get_task(),
"asset": get_asset(),
"project": {"name": get_project_name(),
"code": get_project_code()}}
awd_filled = os.path.normpath(format(awd, data))
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))

View file

@ -1,98 +0,0 @@
from .lib import *
def load_capture_preset(path):
import capture_gui
import capture
path = path
preset = capture_gui.lib.load_json(path)
print preset
options = dict()
# CODEC
id = 'Codec'
for key in preset[id]:
options[str(key)] = preset[id][key]
# GENERIC
id = 'Generic'
for key in preset[id]:
if key.startswith('isolate'):
pass
# options['isolate'] = preset[id][key]
else:
options[str(key)] = preset[id][key]
# RESOLUTION
id = 'Resolution'
options['height'] = preset[id]['height']
options['width'] = preset[id]['width']
# DISPLAY OPTIONS
id = 'Display Options'
disp_options = {}
for key in preset['Display Options']:
if key.startswith('background'):
disp_options[key] = preset['Display Options'][key]
else:
disp_options['displayGradient'] = True
options['display_options'] = disp_options
# VIEWPORT OPTIONS
temp_options = {}
id = 'Renderer'
for key in preset[id]:
temp_options[str(key)] = preset[id][key]
temp_options2 = {}
id = 'Viewport Options'
light_options = {0: "default",
1: 'all',
2: 'selected',
3: 'flat',
4: 'nolights'}
for key in preset[id]:
if key == 'high_quality':
temp_options2['multiSampleEnable'] = True
temp_options2['multiSampleCount'] = 4
temp_options2['textureMaxResolution'] = 512
temp_options2['enableTextureMaxRes'] = True
if key == 'alphaCut':
temp_options2['transparencyAlgorithm'] = 5
temp_options2['transparencyQuality'] = 1
if key == 'headsUpDisplay':
temp_options['headsUpDisplay'] = True
if key == 'displayLights':
temp_options[str(key)] = light_options[preset[id][key]]
else:
temp_options[str(key)] = preset[id][key]
for key in ['override_viewport_options', 'high_quality', 'alphaCut']:
temp_options.pop(key, None)
options['viewport_options'] = temp_options
options['viewport2_options'] = temp_options2
# use active sound track
scene = capture.parse_active_scene()
options['sound'] = scene['sound']
cam_options = dict()
cam_options['overscan'] = 1.0
cam_options['displayFieldChart'] = False
cam_options['displayFilmGate'] = False
cam_options['displayFilmOrigin'] = False
cam_options['displayFilmPivot'] = False
cam_options['displayGateMask'] = False
cam_options['displayResolution'] = False
cam_options['displaySafeAction'] = False
cam_options['displaySafeTitle'] = False
# options['display_options'] = temp_options
return options

View file

@ -105,3 +105,99 @@ def filter_instances(context, plugin):
instances = pyblish.api.instances_by_plugin(allInstances, plugin)
return instances
def load_capture_preset(path):
import capture_gui
import capture
    preset = capture_gui.lib.load_json(path)
    print(preset)
options = dict()
# CODEC
id = 'Codec'
for key in preset[id]:
options[str(key)] = preset[id][key]
# GENERIC
id = 'Generic'
for key in preset[id]:
if key.startswith('isolate'):
pass
# options['isolate'] = preset[id][key]
else:
options[str(key)] = preset[id][key]
# RESOLUTION
id = 'Resolution'
options['height'] = preset[id]['height']
options['width'] = preset[id]['width']
# DISPLAY OPTIONS
id = 'Display Options'
disp_options = {}
for key in preset['Display Options']:
if key.startswith('background'):
disp_options[key] = preset['Display Options'][key]
else:
disp_options['displayGradient'] = True
options['display_options'] = disp_options
# VIEWPORT OPTIONS
temp_options = {}
id = 'Renderer'
for key in preset[id]:
temp_options[str(key)] = preset[id][key]
temp_options2 = {}
id = 'Viewport Options'
light_options = {0: "default",
1: 'all',
2: 'selected',
3: 'flat',
4: 'nolights'}
for key in preset[id]:
if key == 'high_quality':
temp_options2['multiSampleEnable'] = True
temp_options2['multiSampleCount'] = 4
temp_options2['textureMaxResolution'] = 512
temp_options2['enableTextureMaxRes'] = True
if key == 'alphaCut':
temp_options2['transparencyAlgorithm'] = 5
temp_options2['transparencyQuality'] = 1
if key == 'headsUpDisplay':
temp_options['headsUpDisplay'] = True
if key == 'displayLights':
temp_options[str(key)] = light_options[preset[id][key]]
else:
temp_options[str(key)] = preset[id][key]
for key in ['override_viewport_options', 'high_quality', 'alphaCut']:
temp_options.pop(key, None)
options['viewport_options'] = temp_options
options['viewport2_options'] = temp_options2
# use active sound track
scene = capture.parse_active_scene()
options['sound'] = scene['sound']
cam_options = dict()
cam_options['overscan'] = 1.0
cam_options['displayFieldChart'] = False
cam_options['displayFilmGate'] = False
cam_options['displayFilmOrigin'] = False
cam_options['displayFilmPivot'] = False
cam_options['displayGateMask'] = False
cam_options['displayResolution'] = False
cam_options['displaySafeAction'] = False
cam_options['displaySafeTitle'] = False
# options['display_options'] = temp_options
return options
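
`load_capture_preset` expects a JSON preset saved by capture_gui; a hypothetical minimal preset showing only the sections the parser above reads (keys and values are illustrative, not a complete preset):

preset = {
    "Codec": {"compression": "H.264", "format": "qt"},
    "Generic": {"off_screen": True, "isolate_view": False},
    "Resolution": {"width": 1920, "height": 1080},
    "Display Options": {"background": [0.36, 0.36, 0.36]},
    "Renderer": {"rendererName": "vp2Renderer"},
    "Viewport Options": {"displayLights": 0, "high_quality": True,
                         "override_viewport_options": True},
}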

View file

@ -0,0 +1,498 @@
from app import style
from avalon.vendor.Qt import QtCore, QtGui, QtWidgets
import os
import getpass
import platform
import ftrack_api
# object symbol
class Project_name_getUI(QtWidgets.QWidget):
'''
    Project settings UI: all the necessary widgets are created here;
    they are later linked dynamically to the attributes of the project
    selected in the list
'''
def __init__(self, parent=None):
super(Project_name_getUI, self).__init__(parent)
self.platform = platform.system()
self.new_index = 0
# get projects from ftrack
self.session = ftrack_api.Session()
self.projects_from_ft = self.session.query(
'Project where status is active')
self.disks_from_ft = self.session.query('Disk')
self.schemas_from_ft = self.session.query('ProjectSchema')
self.projects = self._get_projects_ftrack()
# define window geometry
self.setWindowTitle('Set project attributes')
self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
self.resize(550, 340)
self.setStyleSheet(style.load_stylesheet())
# define disk combobox widget
self.disks = self._get_all_disks()
self.disk_combobox_label = QtWidgets.QLabel('Destination storage:')
self.disk_combobox = QtWidgets.QComboBox()
# define schema combobox widget
self.schemas = self._get_all_schemas()
self.schema_combobox_label = QtWidgets.QLabel('Project schema:')
self.schema_combobox = QtWidgets.QComboBox()
# define fps widget
self.fps_label = QtWidgets.QLabel('Fps:')
self.fps_label.setAlignment(
QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.fps = QtWidgets.QLineEdit()
# define project dir widget
self.project_dir_label = QtWidgets.QLabel('Project dir:')
self.project_dir_label.setAlignment(
QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.project_dir = QtWidgets.QLineEdit()
        self.project_path_label = QtWidgets.QLabel(
            'Project path (created if missing):')
self.project_path_label.setAlignment(
QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
project_path_font = QtGui.QFont(
"Helvetica [Cronyx]", 12, QtGui.QFont.Bold)
self.project_path = QtWidgets.QLabel()
self.project_path.setObjectName('nom_plan_label')
self.project_path.setStyleSheet(
'QtWidgets.QLabel#nom_plan_label {color: red}')
self.project_path.setAlignment(
QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
self.project_path.setFont(project_path_font)
# define handles widget
self.handles_label = QtWidgets.QLabel('Handles:')
self.handles_label.setAlignment(
QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
self.handles = QtWidgets.QLineEdit()
# define resolution widget
self.resolution_w_label = QtWidgets.QLabel('W:')
self.resolution_w = QtWidgets.QLineEdit()
self.resolution_h_label = QtWidgets.QLabel('H:')
self.resolution_h = QtWidgets.QLineEdit()
devider = QtWidgets.QFrame()
# devider.Shape(QFrame.HLine)
devider.setFrameShape(QtWidgets.QFrame.HLine)
devider.setFrameShadow(QtWidgets.QFrame.Sunken)
self.generate_lines()
# define push buttons
self.set_pushbutton = QtWidgets.QPushButton('Set project')
self.cancel_pushbutton = QtWidgets.QPushButton('Cancel')
# definition of layouts
############################################
action_layout = QtWidgets.QHBoxLayout()
action_layout.addWidget(self.set_pushbutton)
action_layout.addWidget(self.cancel_pushbutton)
# schema property
schema_layout = QtWidgets.QGridLayout()
schema_layout.addWidget(self.schema_combobox, 0, 1)
schema_layout.addWidget(self.schema_combobox_label, 0, 0)
# storage property
storage_layout = QtWidgets.QGridLayout()
storage_layout.addWidget(self.disk_combobox, 0, 1)
storage_layout.addWidget(self.disk_combobox_label, 0, 0)
# fps property
fps_layout = QtWidgets.QGridLayout()
fps_layout.addWidget(self.fps, 1, 1)
fps_layout.addWidget(self.fps_label, 1, 0)
# project dir property
project_dir_layout = QtWidgets.QGridLayout()
project_dir_layout.addWidget(self.project_dir, 1, 1)
project_dir_layout.addWidget(self.project_dir_label, 1, 0)
# project path property
project_path_layout = QtWidgets.QGridLayout()
spacer_1_item = QtWidgets.QSpacerItem(10, 10)
project_path_layout.addItem(spacer_1_item, 0, 1)
project_path_layout.addWidget(self.project_path_label, 1, 1)
project_path_layout.addWidget(self.project_path, 2, 1)
spacer_2_item = QtWidgets.QSpacerItem(20, 20)
project_path_layout.addItem(spacer_2_item, 3, 1)
# handles property
handles_layout = QtWidgets.QGridLayout()
handles_layout.addWidget(self.handles, 1, 1)
handles_layout.addWidget(self.handles_label, 1, 0)
# resolution property
resolution_layout = QtWidgets.QGridLayout()
resolution_layout.addWidget(self.resolution_w_label, 1, 1)
resolution_layout.addWidget(self.resolution_w, 2, 1)
resolution_layout.addWidget(self.resolution_h_label, 1, 2)
resolution_layout.addWidget(self.resolution_h, 2, 2)
# form project property layout
p_layout = QtWidgets.QGridLayout()
p_layout.addLayout(storage_layout, 1, 0)
p_layout.addLayout(schema_layout, 2, 0)
p_layout.addLayout(project_dir_layout, 3, 0)
p_layout.addLayout(fps_layout, 4, 0)
p_layout.addLayout(handles_layout, 5, 0)
p_layout.addLayout(resolution_layout, 6, 0)
p_layout.addWidget(devider, 7, 0)
spacer_item = QtWidgets.QSpacerItem(
150,
40,
QtWidgets.QSizePolicy.Minimum,
QtWidgets.QSizePolicy.Expanding
)
p_layout.addItem(spacer_item, 8, 0)
# form with list to one layout with project property
list_layout = QtWidgets.QGridLayout()
list_layout.addLayout(p_layout, 1, 0)
list_layout.addWidget(self.listWidget, 1, 1)
root_layout = QtWidgets.QVBoxLayout()
root_layout.addLayout(project_path_layout)
root_layout.addWidget(devider)
root_layout.addLayout(list_layout)
root_layout.addLayout(action_layout)
self.setLayout(root_layout)
def generate_lines(self):
'''
        Generate the rows of the project list
'''
self.listWidget = QtWidgets.QListWidget()
for self.index, p in enumerate(self.projects):
item = QtWidgets.QListWidgetItem("{full_name}".format(**p))
# item.setSelected(False)
self.listWidget.addItem(item)
print(self.listWidget.indexFromItem(item))
# self.listWidget.setCurrentItem(self.listWidget.itemFromIndex(1))
# add options to schemas widget
self.schema_combobox.addItems(self.schemas)
# add options to disk widget
self.disk_combobox.addItems(self.disks)
# populate content of project info widgets
self.projects[1] = self._fill_project_attributes_widgets(p, None)
def _fill_project_attributes_widgets(self, p=None, index=None):
'''
        Populate the widgets with the actual information saved on ftrack
'''
if index is None:
self.new_index = 1
if not p:
pass
# change schema selection
for i, schema in enumerate(self.schemas):
if p['project_schema']['name'] in schema:
break
self.schema_combobox.setCurrentIndex(i)
disk_name, disk_path = self._build_disk_path()
for i, disk in enumerate(self.disks):
if disk_name in disk:
break
# change disk selection
self.disk_combobox.setCurrentIndex(i)
# change project_dir selection
if "{root}".format(**p):
self.project_dir.setPlaceholderText("{root}".format(**p))
else:
print("not root so it was replaced with name")
self.project_dir.setPlaceholderText("{name}".format(**p))
p['root'] = p['name']
# set project path to show where it will be created
self.project_path.setText(
os.path.join(self.disks[i].split(' ')[-1],
self.project_dir.text()))
# change fps selection
self.fps.setPlaceholderText("{custom_attributes[fps]}".format(**p))
# change handles selection
self.handles.setPlaceholderText(
"{custom_attributes[handles]}".format(**p))
# change resolution selection
self.resolution_w.setPlaceholderText(
"{custom_attributes[resolution_width]}".format(**p))
self.resolution_h.setPlaceholderText(
"{custom_attributes[resolution_height]}".format(**p))
self.update_disk()
return p
def fix_project_path_literals(self, dir):
return dir.replace(' ', '_').lower()
def update_disk(self):
disk = self.disk_combobox.currentText().split(' ')[-1]
dir = self.project_dir.text()
if not dir:
dir = "{root}".format(**self.projects[self.new_index])
self.projects[self.new_index]['project_path'] = os.path.normpath(
self.fix_project_path_literals(os.path.join(disk, dir)))
else:
self.projects[self.new_index]['project_path'] = os.path.normpath(
self.fix_project_path_literals(os.path.join(disk, dir)))
self.projects[self.new_index]['disk'] = self.disks_from_ft[
self.disk_combobox.currentIndex()]
self.projects[self.new_index]['disk_id'] = self.projects[
self.new_index]['disk']['id']
# set project path to show where it will be created
self.project_path.setText(
self.projects[self.new_index]['project_path'])
def update_resolution(self):
# update all values in resolution
if self.resolution_w.text():
self.projects[self.new_index]['custom_attributes'][
'resolution_width'] = int(self.resolution_w.text())
if self.resolution_h.text():
self.projects[self.new_index]['custom_attributes'][
'resolution_height'] = int(self.resolution_h.text())
def _update_attributes_by_list_selection(self):
# generate actual selection index
self.new_index = self.listWidget.currentRow()
self.project_dir.setText('')
self.fps.setText('')
self.handles.setText('')
self.resolution_w.setText('')
self.resolution_h.setText('')
        # update project property widgets and write changes
        # into the project dictionaries
self.projects[self.new_index] = self._fill_project_attributes_widgets(
self.projects[self.new_index], self.new_index)
self.update_disk()
def _build_disk_path(self):
if self.platform == "Windows":
print(self.projects[self.index].keys())
print(self.projects[self.new_index]['disk'])
return self.projects[self.new_index]['disk'][
'name'], self.projects[self.new_index]['disk']['windows']
else:
return self.projects[self.new_index]['disk'][
'name'], self.projects[self.new_index]['disk']['unix']
def _get_all_schemas(self):
schemas_list = []
for s in self.schemas_from_ft:
# print d.keys()
# if 'Pokus' in s['name']:
# continue
schemas_list.append('{}'.format(s['name']))
print("\nschemas in ftrack: {}\n".format(schemas_list))
return schemas_list
def _get_all_disks(self):
disks_list = []
for d in self.disks_from_ft:
# print d.keys()
if self.platform == "Windows":
if 'Local drive' in d['name']:
d['windows'] = os.path.join(d['windows'],
os.getenv('USERNAME')
or os.getenv('USER')
or os.getenv('LOGNAME'))
disks_list.append('"{}" at {}'.format(d['name'], d['windows']))
else:
if 'Local drive' in d['name']:
d['unix'] = os.path.join(d['unix'], getpass.getuser())
disks_list.append('"{}" at {}'.format(d['name'], d['unix']))
return disks_list
def _get_projects_ftrack(self):
projects_lst = []
for project in self.projects_from_ft:
# print project.keys()
projects_dict = {}
for k in project.keys():
''' # TODO: delete this in production version '''
# if 'test' not in project['name']:
# continue
# print '{}: {}\n'.format(k, project[k])
if '_link' == k:
# print project[k]
content = project[k]
for kc in content[0].keys():
if content[0]['name']:
content[0][kc] = content[0][kc].encode(
'ascii', 'ignore').decode('ascii')
print('{}: {}\n'.format(kc, content[0][kc]))
projects_dict[k] = content
print(project[k])
print(projects_dict[k])
elif 'root' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'disk' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'name' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k].encode(
'ascii', 'ignore').decode('ascii')
elif 'disk_id' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'id' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'full_name' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k].encode(
'ascii', 'ignore').decode('ascii')
elif 'project_schema_id' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'project_schema' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
elif 'custom_attributes' == k:
print('{}: {}\n'.format(k, project[k]))
projects_dict[k] = project[k]
else:
pass
if projects_dict:
projects_lst.append(projects_dict)
return projects_lst
class Project_name_get(Project_name_getUI):
def __init__(self, parent=None):
super(Project_name_get, self).__init__(parent)
# self.input_project_name.textChanged.connect(self.input_project_name.placeholderText)
self.set_pushbutton.clicked.connect(lambda: self.execute())
self.cancel_pushbutton.clicked.connect(self.close)
self.listWidget.itemSelectionChanged.connect(
self._update_attributes_by_list_selection)
self.disk_combobox.currentIndexChanged.connect(self.update_disk)
self.schema_combobox.currentIndexChanged.connect(self.update_schema)
self.project_dir.textChanged.connect(self.update_disk)
self.fps.textChanged.connect(self.update_fps)
self.handles.textChanged.connect(self.update_handles)
self.resolution_w.textChanged.connect(self.update_resolution)
self.resolution_h.textChanged.connect(self.update_resolution)
def update_handles(self):
self.projects[self.new_index]['custom_attributes']['handles'] = int(
self.handles.text())
def update_fps(self):
self.projects[self.new_index]['custom_attributes']['fps'] = int(
self.fps.text())
def update_schema(self):
self.projects[self.new_index]['project_schema'] = self.schemas_from_ft[
self.schema_combobox.currentIndex()]
self.projects[self.new_index]['project_schema_id'] = self.projects[
self.new_index]['project_schema']['id']
def execute(self):
# import ft_utils
# import hiero
# get the project which has been selected
print("well and what")
# set the project as context and create entity
# entity is task created with the name of user which is creating it
# get the project_path and create dir if there is not any
print(self.projects[self.new_index]['project_path'].replace(
self.disk_combobox.currentText().split(' ')[-1].lower(), ''))
# get the schema and recreate a starting project regarding the selection
# set_hiero_template(project_schema=self.projects[self.new_index][
# 'project_schema']['name'])
# set all project properities
# project = hiero.core.Project()
# project.setFramerate(
# int(self.projects[self.new_index]['custom_attributes']['fps']))
# project.projectRoot()
# print 'handles: {}'.format(self.projects[self.new_index]['custom_attributes']['handles'])
# print 'resolution_width: {}'.format(self.projects[self.new_index]['custom_attributes']['resolution_width'])
# print 'resolution_width: {}'.format(self.projects[self.new_index]['custom_attributes']['resolution_height'])
# print "<< {}".format(self.projects[self.new_index])
# get path for the hrox file
# root = context.data('ftrackData')['Project']['root']
# hrox_script_path = ft_utils.getPathsYaml(taskid, templateList=templates, root=root)
# save the hrox into the correct path
self.session.commit()
self.close()
#
# def set_hiero_template(project_schema=None):
# import hiero
# hiero.core.closeAllProjects()
# hiero_plugin_path = [
# p for p in os.environ['HIERO_PLUGIN_PATH'].split(';')
# if 'hiero_plugin_path' in p
# ][0]
# path = os.path.normpath(
# os.path.join(hiero_plugin_path, 'Templates', project_schema + '.hrox'))
# print('---> path to template: {}'.format(path))
# return hiero.core.openProject(path)
# def set_out_ft_session():
# session = ftrack_api.Session()
# projects_to_ft = session.query('Project where status is active')
def main():
import sys
app = QtWidgets.QApplication(sys.argv)
panel = Project_name_get()
panel.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()

View file

@ -1,14 +1,17 @@
from avalon.tools import workfiles
import nuke
# auto fix version paths in write nodes following root name of script
cmd = '''
import re
rootVersion=re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group()
for each in nuke.allNodes():
if each.Class() == 'Write':
each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value()))
'''
nuke.knobDefault('onScriptSave', cmd)
print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n'
# cmd = '''
# import re
# rootVersion=re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group()
# for each in nuke.allNodes():
# if each.Class() == 'Write':
# each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value()))
# '''
# nuke.knobDefault('onScriptSave', cmd)
#
# print '\n>>> menu.py: Function for automatic check of version in write nodes is added\n'
ffmpeg_cmd = '''if nuke.env['LINUX']:
nuke.tcl('load ffmpegReader')
@ -22,6 +25,8 @@ nuke.knobDefault('onScriptLoad', ffmpeg_cmd)
# # run avalon's tool Workfiles
# workfiles = '''from avalon.tools import workfiles
# if nuke.Root().name() == 'Root':
# nuke.scriptClose()
# nuke.scriptClear()
# workfiles.show(os.environ["AVALON_WORKDIR"])'''
# nuke.knobDefault('onCreate', workfiles)
# workfiles.show(os.environ["AVALON_WORKDIR"])
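
For reference, `nuke.knobDefault` with a root callback knob is how both snippets in this file are wired up; a minimal hypothetical example:

import nuke

# print the script name every time the script is saved (illustrative only)
nuke.knobDefault('onScriptSave', 'print(nuke.root().name())')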