\d+.\d+.\d+)")
- applicationIdentifier = "djvview"
- description = "DJV View Launcher"
- icon = "http://a.fsdn.com/allura/p/djv/icon"
- expression = []
- if sys.platform == "win32":
- expression = ["C:\\", "Program Files", "djv-\d.+",
- "bin", "djv_view.exe"]
+ entityType = selection[0].get("entityType", None)
+ if entityType in ["assetversion", "task"]:
+ return True
+ return False
- elif sys.platform == "darwin":
- expression = ["Application", "DJV.app", "Contents", "MacOS", "DJV"]
- # Linuxs
- else:
- expression = ["usr", "local", "djv", "djv_view"]
-
- pieces = expression[:]
- start = pieces.pop(0)
-
- if sys.platform == 'win32':
- # On Windows C: means current directory so convert roots that look
- # like drive letters to the C:\ format.
- if start and start[-1] == ':':
- start += '\\'
-
- if not os.path.exists(start):
- raise ValueError(
- 'First part "{0}" of expression "{1}" must match exactly to an'
- ' existing entry on the filesystem.'
- .format(start, expression)
- )
-
- expressions = list(map(re.compile, pieces))
- expressionsCount = len(expression)-1
-
- for location, folders, files in os.walk(
- start, topdown=True, followlinks=True
- ):
- level = location.rstrip(os.path.sep).count(os.path.sep)
- expression = expressions[level]
-
- if level < (expressionsCount - 1):
- # If not yet at final piece then just prune directories.
- folders[:] = [folder for folder in folders
- if expression.match(folder)]
- else:
- # Match executable. Note that on OSX executable might equate to
- # a folder (.app).
- for entry in folders + files:
- match = expression.match(entry)
- if match:
- # Extract version from full matching path.
- path = os.path.join(start, location, entry)
- versionMatch = versionExpression.search(path)
- if versionMatch:
- version = versionMatch.group('version')
-
- applications.append({
- 'identifier': applicationIdentifier.format(
- version=version
- ),
- 'path': path,
- 'version': version,
- 'label': label.format(version=version),
- 'icon': icon,
- # 'variant': variant.format(version=version),
- 'description': description
- })
- else:
- self.logger.debug(
- 'Discovered application executable, but it '
- 'does not appear to o contain required version'
- ' information: {0}'.format(path)
- )
-
- # Don't descend any further as out of patterns to match.
- del folders[:]
-
- return applications
-
- def translate_event(self, session, event):
- '''Return *event* translated structure to be used with the API.'''
-
- selection = event['data'].get('selection', [])
-
- entities = list()
- for entity in selection:
- entities.append(
- (session.get(
- self.get_entity_type(entity), entity.get('entityId')
- ))
- )
-
- return entities
-
- def get_entity_type(self, entity):
- entity_type = entity.get('entityType').replace('_', '').lower()
-
- for schema in self.session.schemas:
- alias_for = schema.get('alias_for')
-
- if (
- alias_for and isinstance(alias_for, str) and
- alias_for.lower() == entity_type
- ):
- return schema['id']
-
- for schema in self.session.schemas:
- if schema['id'].lower() == entity_type:
- return schema['id']
-
- raise ValueError(
- 'Unable to translate entity type: {0}.'.format(entity_type)
- )
-
- def launch(self, event):
- """Callback method for DJVView action."""
- session = self.session
- entities = self.translate_event(session, event)
-
- # Launching application
- if "values" in event["data"]:
- filename = event['data']['values']['path']
- file_type = filename.split(".")[-1]
-
- # TODO Is this proper way?
- try:
- fps = int(entities[0]['custom_attributes']['fps'])
- except Exception:
- fps = 24
-
- # TODO issequence is probably already built-in validation in ftrack
- isseq = re.findall('%[0-9]*d', filename)
- if len(isseq) > 0:
- if len(isseq) == 1:
- frames = []
- padding = re.findall('%[0-9]*d', filename).pop()
- index = filename.find(padding)
-
- full_file = filename[0:index-1]
- file = full_file.split(os.sep)[-1]
- folder = os.path.dirname(full_file)
-
- for fname in os.listdir(path=folder):
- if fname.endswith(file_type) and file in fname:
- frames.append(int(fname.split(".")[-2]))
-
- if len(frames) > 0:
- start = min(frames)
- end = max(frames)
-
- range = (padding % start) + '-' + (padding % end)
- filename = re.sub('%[0-9]*d', range, filename)
- else:
- msg = (
- 'DJV View - Filename has more than one'
- ' sequence identifier.'
- )
- return {
- 'success': False,
- 'message': (msg)
- }
-
- cmd = []
- # DJV path
- cmd.append(os.path.normpath(self.djv_path))
- # DJV Options Start ##############################################
- '''layer name'''
- # cmd.append('-file_layer (value)')
- ''' Proxy scale: 1/2, 1/4, 1/8'''
- cmd.append('-file_proxy 1/2')
- ''' Cache: True, False.'''
- cmd.append('-file_cache True')
- ''' Start in full screen '''
- # cmd.append('-window_fullscreen')
- ''' Toolbar controls: False, True.'''
- # cmd.append("-window_toolbar False")
- ''' Window controls: False, True.'''
- # cmd.append("-window_playbar False")
- ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
- # cmd.append("-view_grid None")
- ''' Heads up display: True, False.'''
- # cmd.append("-view_hud True")
- ''' Playback: Stop, Forward, Reverse.'''
- cmd.append("-playback Forward")
- ''' Frame.'''
- # cmd.append("-playback_frame (value)")
- cmd.append("-playback_speed " + str(fps))
- ''' Timer: Sleep, Timeout. Value: Sleep.'''
- # cmd.append("-playback_timer (value)")
- ''' Timer resolution (seconds): 0.001.'''
- # cmd.append("-playback_timer_resolution (value)")
- ''' Time units: Timecode, Frames.'''
- cmd.append("-time_units Frames")
- # DJV Options End ################################################
-
- # PATH TO COMPONENT
- cmd.append(os.path.normpath(filename))
-
- # Run DJV with these commands
- subprocess.Popen(' '.join(cmd))
-
- return {
- 'success': True,
- 'message': 'DJV View started.'
- }
-
- if 'items' not in event["data"]:
- event["data"]['items'] = []
+ def load_config_data(self):
+ path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
+ filepath = os.path.sep.join(path_items)
+ data = dict()
try:
- for entity in entities:
- versions = []
- allowed_types = ["img", "mov", "exr"]
-
- if entity.entity_type.lower() == "assetversion":
- if entity['components'][0]['file_type'] in allowed_types:
- versions.append(entity)
-
- elif entity.entity_type.lower() == "task":
- # AssetVersions are obtainable only from shot!
- shotentity = entity['parent']
-
- for asset in shotentity['assets']:
- for version in asset['versions']:
- # Get only AssetVersion of selected task
- if version['task']['id'] != entity['id']:
- continue
- # Get only components with allowed type
- filetype = version['components'][0]['file_type']
- if filetype in allowed_types:
- versions.append(version)
-
- # Raise error if no components were found
- if len(versions) < 1:
- raise ValueError('There are no Asset Versions to open.')
-
- for version in versions:
- for component in version['components']:
- label = "v{0} - {1} - {2}"
-
- label = label.format(
- str(version['version']).zfill(3),
- version['asset']['type']['name'],
- component['name']
- )
-
- try:
- # TODO This is proper way to get filepath!!!
- # THIS WON'T WORK RIGHT NOW
- location = component[
- 'component_locations'
- ][0]['location']
- file_path = location.get_filesystem_path(component)
- # if component.isSequence():
- # if component.getMembers():
- # frame = int(
- # component.getMembers()[0].getName()
- # )
- # file_path = file_path % frame
- except Exception:
- # This works but is NOT proper way
- file_path = component[
- 'component_locations'
- ][0]['resource_identifier']
-
- event["data"]["items"].append(
- {"label": label, "value": file_path}
- )
-
+ with open(filepath) as data_file:
+ data = json.load(data_file)
except Exception as e:
+ log.warning(
+ 'Failed to load data from DJV presets file ({})'.format(e)
+ )
+
+ self.config_data = data
+
+ def set_djv_path(self):
+ for path in self.config_data.get("djv_paths", []):
+ if os.path.exists(path):
+ self.djv_path = path
+ break
+
+ def interface(self, session, entities, event):
+ if event['data'].get('values', {}):
+ return
+
+ entity = entities[0]
+ versions = []
+
+ entity_type = entity.entity_type.lower()
+ if entity_type == "assetversion":
+ if (
+ entity[
+ 'components'
+ ][0]['file_type'][1:] in self.allowed_types
+ ):
+ versions.append(entity)
+ else:
+ master_entity = entity
+ if entity_type == "task":
+ master_entity = entity['parent']
+
+ for asset in master_entity['assets']:
+ for version in asset['versions']:
+ # Get only AssetVersion of selected task
+ if (
+ entity_type == "task" and
+ version['task']['id'] != entity['id']
+ ):
+ continue
+ # Get only components with allowed type
+ filetype = version['components'][0]['file_type']
+ if filetype[1:] in self.allowed_types:
+ versions.append(version)
+
+ if len(versions) < 1:
return {
'success': False,
- 'message': str(e)
+ 'message': 'There are no Asset Versions to open.'
}
- return {
- "items": [
- {
- "label": "Items to view",
- "type": "enumerator",
- "name": "path",
- "data": sorted(
- event["data"]['items'],
- key=itemgetter("label"),
- reverse=True
- )
- }
- ]
- }
+ items = []
+ base_label = "v{0} - {1} - {2}"
+ default_component = self.config_data.get(
+ 'default_component', None
+ )
+ last_available = None
+ select_value = None
+ for version in versions:
+ for component in version['components']:
+ label = base_label.format(
+ str(version['version']).zfill(3),
+ version['asset']['type']['name'],
+ component['name']
+ )
+ try:
+ location = component[
+ 'component_locations'
+ ][0]['location']
+ file_path = location.get_filesystem_path(component)
+ except Exception:
+ file_path = component[
+ 'component_locations'
+ ][0]['resource_identifier']
+
+ if os.path.isdir(os.path.dirname(file_path)):
+ last_available = file_path
+ if component['name'] == default_component:
+ select_value = file_path
+ items.append(
+ {'label': label, 'value': file_path}
+ )
+
+ if len(items) == 0:
+ return {
+ 'success': False,
+ 'message': (
+ 'There are no Asset Versions with accessible path.'
+ )
+ }
+
+ item = {
+ 'label': 'Items to view',
+ 'type': 'enumerator',
+ 'name': 'path',
+ 'data': sorted(
+ items,
+ key=itemgetter('label'),
+ reverse=True
+ )
+ }
+ if select_value is not None:
+ item['value'] = select_value
+ else:
+ item['value'] = last_available
+
+ return {'items': [item]}
+
+ def launch(self, session, entities, event):
+ """Callback method for DJVView action."""
+
+ # Launching application
+ if "values" not in event["data"]:
+ return
+ filename = event['data']['values']['path']
+
+ fps = entities[0].get('custom_attributes', {}).get('fps', None)
+
+ cmd = []
+ # DJV path
+ cmd.append(os.path.normpath(self.djv_path))
+ # DJV Options Start ##############################################
+ # '''layer name'''
+ # cmd.append('-file_layer (value)')
+ # ''' Proxy scale: 1/2, 1/4, 1/8'''
+ # cmd.append('-file_proxy 1/2')
+ # ''' Cache: True, False.'''
+ # cmd.append('-file_cache True')
+ # ''' Start in full screen '''
+ # cmd.append('-window_fullscreen')
+ # ''' Toolbar controls: False, True.'''
+ # cmd.append("-window_toolbar False")
+ # ''' Window controls: False, True.'''
+ # cmd.append("-window_playbar False")
+ # ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
+ # cmd.append("-view_grid None")
+ # ''' Heads up display: True, False.'''
+ # cmd.append("-view_hud True")
+ ''' Playback: Stop, Forward, Reverse.'''
+ cmd.append("-playback Forward")
+ # ''' Frame.'''
+ # cmd.append("-playback_frame (value)")
+ if fps is not None:
+ cmd.append("-playback_speed {}".format(int(fps)))
+ # ''' Timer: Sleep, Timeout. Value: Sleep.'''
+ # cmd.append("-playback_timer (value)")
+ # ''' Timer resolution (seconds): 0.001.'''
+ # cmd.append("-playback_timer_resolution (value)")
+ ''' Time units: Timecode, Frames.'''
+ cmd.append("-time_units Frames")
+ # DJV Options End ################################################
+
+ # PATH TO COMPONENT
+ cmd.append(os.path.normpath(filename))
+
+ try:
+ # Run DJV with these commands
+ subprocess.Popen(' '.join(cmd))
+ except FileNotFoundError:
+ return {
+ 'success': False,
+ 'message': 'File "{}" was not found.'.format(
+ os.path.basename(filename)
+ )
+ }
+
+ return True
def register(session):
"""Register hooks."""
if not isinstance(session, ftrack_api.session.Session):
return
- action = DJVViewAction(session)
- action.register()
+ DJVViewAction(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_job_killer.py b/pype/ftrack/actions/action_job_killer.py
index 184053ed47..440fdc1654 100644
--- a/pype/ftrack/actions/action_job_killer.py
+++ b/pype/ftrack/actions/action_job_killer.py
@@ -16,7 +16,13 @@ class JobKiller(BaseAction):
#: Action label.
label = 'Job Killer'
#: Action description.
- description = 'Killing all running jobs younger than day'
+ description = 'Killing selected running jobs'
+ #: roles that are allowed to register this action
+ role_list = ['Pypeclub', 'Administrator']
+ icon = (
+ 'https://cdn2.iconfinder.com/data/icons/new-year-resolutions/64/'
+ 'resolutions-23-512.png'
+ )
def discover(self, session, entities, event):
''' Validation '''
@@ -30,29 +36,42 @@ class JobKiller(BaseAction):
jobs = session.query(
'select id, status from Job'
' where status in ("queued", "running")'
- )
+ ).all()
items = []
import json
+ item_splitter = {'type': 'label', 'value': '---'}
for job in jobs:
data = json.loads(job['data'])
user = job['user']['username']
created = job['created_at'].strftime('%d.%m.%Y %H:%M:%S')
- label = '{}/ {}/ {}'.format(
+ label = '{} - {} - {}'.format(
data['description'], created, user
)
+ item_label = {
+ 'type': 'label',
+ 'value': label
+ }
item = {
- 'label': label,
'name': job['id'],
'type': 'boolean',
'value': False
}
+ if len(items) > 0:
+ items.append(item_splitter)
+ items.append(item_label)
items.append(item)
- return {
- 'items': items,
- 'title': title
- }
+ if len(items) == 0:
+ return {
+ 'success': False,
+ 'message': 'Didn\'t find any running jobs'
+ }
+ else:
+ return {
+ 'items': items,
+ 'title': title
+ }
def launch(self, session, entities, event):
""" GET JOB """
@@ -104,8 +123,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = JobKiller(session)
- action_handler.register()
+ JobKiller(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_set_version.py b/pype/ftrack/actions/action_set_version.py
index 9156f23055..3954733041 100644
--- a/pype/ftrack/actions/action_set_version.py
+++ b/pype/ftrack/actions/action_set_version.py
@@ -80,8 +80,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = SetVersion(session)
- action_handler.register()
+ SetVersion(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py
index 97ab49a272..88a25ed3ac 100644
--- a/pype/ftrack/actions/action_sync_to_avalon_local.py
+++ b/pype/ftrack/actions/action_sync_to_avalon_local.py
@@ -54,6 +54,8 @@ class SyncToAvalon(BaseAction):
'https://cdn1.iconfinder.com/data/icons/hawcons/32/'
'699650-icon-92-inbox-download-512.png'
)
+ #: roles that are allowed to register this action
+ role_list = ['Pypeclub']
#: Action priority
priority = 200
@@ -63,22 +65,11 @@ class SyncToAvalon(BaseAction):
def discover(self, session, entities, event):
''' Validation '''
- role_check = False
- discover = False
- role_list = ['Pypeclub']
- user_id = event['source']['user']['id']
- user = session.query('User where id is ' + user_id).one()
+ for entity in entities:
+ if entity.entity_type.lower() not in ['task', 'assetversion']:
+ return True
- for role in user['user_security_roles']:
- if role['security_role']['name'] in role_list:
- role_check = True
- if role_check is True:
- for entity in entities:
- if entity.entity_type.lower() not in ['task', 'assetversion']:
- discover = True
- break
-
- return discover
+ return False
def launch(self, session, entities, event):
message = ""
@@ -91,15 +82,11 @@ class SyncToAvalon(BaseAction):
'user': user,
'status': 'running',
'data': json.dumps({
- 'description': 'Synch Ftrack to Avalon.'
+ 'description': 'Sync Ftrack to Avalon.'
})
})
-
+ session.commit()
try:
- self.log.info(
- "Action <" + self.__class__.__name__ + "> is running"
- )
-
self.importable = []
# get from top entity in hierarchy all parent entities
@@ -146,26 +133,11 @@ class SyncToAvalon(BaseAction):
)
if 'errors' in result and len(result['errors']) > 0:
- items = []
- for error in result['errors']:
- for key, message in error.items():
- name = key.lower().replace(' ', '')
- info = {
- 'label': key,
- 'type': 'textarea',
- 'name': name,
- 'value': message
- }
- items.append(info)
- self.log.error(
- '{}: {}'.format(key, message)
- )
- title = 'Hey You! Few Errors were raised! (*look below*)'
-
job['status'] = 'failed'
session.commit()
- self.show_interface(event, items, title)
+ ftracklib.show_errors(self, event, result['errors'])
+
return {
'success': False,
'message': "Sync to avalon FAILED"
@@ -176,7 +148,6 @@ class SyncToAvalon(BaseAction):
avalon_project = result['project']
job['status'] = 'done'
- self.log.info('Synchronization to Avalon was successfull!')
except ValueError as ve:
job['status'] = 'failed'
@@ -234,8 +205,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = SyncToAvalon(session)
- action_handler.register()
+ SyncToAvalon(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_test.py b/pype/ftrack/actions/action_test.py
index 31bcd4f518..ad97cba487 100644
--- a/pype/ftrack/actions/action_test.py
+++ b/pype/ftrack/actions/action_test.py
@@ -1,5 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2017 ftrack
import sys
import argparse
import logging
@@ -27,20 +25,17 @@ class TestAction(BaseAction):
description = 'Test action'
#: priority
priority = 10000
+ #: roles that are allowed to register this action
+ role_list = ['Pypeclub']
+ icon = (
+ 'https://cdn4.iconfinder.com/data/icons/hospital-19/512/'
+ '8_hospital-512.png'
+ )
def discover(self, session, entities, event):
''' Validation '''
- discover = False
- roleList = ['Pypeclub']
- userId = event['source']['user']['id']
- user = session.query('User where id is ' + userId).one()
- for role in user['user_security_roles']:
- if role['security_role']['name'] in roleList:
- discover = True
- break
-
- return discover
+ return True
def launch(self, session, entities, event):
self.log.info(event)
@@ -54,8 +49,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = TestAction(session)
- action_handler.register()
+ TestAction(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_thumbToChildern.py b/pype/ftrack/actions/action_thumbToChildern.py
index 8b31c4b7e9..2ff6faec48 100644
--- a/pype/ftrack/actions/action_thumbToChildern.py
+++ b/pype/ftrack/actions/action_thumbToChildern.py
@@ -1,6 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 Milan Kolar
-
import sys
import argparse
import logging
@@ -72,8 +69,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = ThumbToChildren(session)
- action_handler.register()
+ ThumbToChildren(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/actions/action_thumbToParent.py b/pype/ftrack/actions/action_thumbToParent.py
index 56d2c94a46..98124aca70 100644
--- a/pype/ftrack/actions/action_thumbToParent.py
+++ b/pype/ftrack/actions/action_thumbToParent.py
@@ -1,6 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2015 Milan Kolar
-
import sys
import argparse
import logging
@@ -94,8 +91,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = ThumbToParent(session)
- action_handler.register()
+ ThumbToParent(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/credentials.py b/pype/ftrack/credentials.py
index 6f756e8a52..89353ea984 100644
--- a/pype/ftrack/credentials.py
+++ b/pype/ftrack/credentials.py
@@ -1,54 +1,65 @@
import os
-import toml
-
+import json
import ftrack_api
import appdirs
config_path = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
-config_name = 'ftrack_cred.toml'
-fpath = os.path.join(config_path, config_name)
-folder = os.path.dirname(fpath)
-
-if not os.path.isdir(folder):
- os.makedirs(folder)
-
-
-def _get_credentials():
-
- folder = os.path.dirname(fpath)
+action_file_name = 'ftrack_cred.json'
+event_file_name = 'ftrack_event_cred.json'
+action_fpath = os.path.join(config_path, action_file_name)
+event_fpath = os.path.join(config_path, event_file_name)
+folders = set([os.path.dirname(action_fpath), os.path.dirname(event_fpath)])
+for folder in folders:
if not os.path.isdir(folder):
os.makedirs(folder)
+
+def _get_credentials(event=False):
+ if event:
+ fpath = event_fpath
+ else:
+ fpath = action_fpath
+
+ credentials = {}
try:
file = open(fpath, 'r')
+ credentials = json.load(file)
except Exception:
- filecreate = open(fpath, 'w')
- filecreate.close()
- file = open(fpath, 'r')
+ file = open(fpath, 'w')
- credentials = toml.load(file)
file.close()
return credentials
-def _save_credentials(username, apiKey):
- file = open(fpath, 'w')
-
+def _save_credentials(username, apiKey, event=False, auto_connect=None):
data = {
'username': username,
'apiKey': apiKey
}
- credentials = toml.dumps(data)
- file.write(credentials)
+ if event:
+ fpath = event_fpath
+ if auto_connect is None:
+ cred = _get_credentials(True)
+ auto_connect = cred.get('auto_connect', False)
+ data['auto_connect'] = auto_connect
+ else:
+ fpath = action_fpath
+
+ file = open(fpath, 'w')
+ file.write(json.dumps(data))
file.close()
-def _clear_credentials():
- file = open(fpath, 'w').close()
+def _clear_credentials(event=False):
+ if event:
+ fpath = event_fpath
+ else:
+ fpath = action_fpath
+ open(fpath, 'w').close()
_set_env(None, None)
diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py
index 4fd52dcab5..22358cd775 100644
--- a/pype/ftrack/events/action_sync_to_avalon.py
+++ b/pype/ftrack/events/action_sync_to_avalon.py
@@ -78,6 +78,7 @@ class Sync_To_Avalon(BaseAction):
for role in user['user_security_roles']:
if role['security_role']['name'] in roleList:
roleCheck = True
+ break
if roleCheck is True:
for entity in entities:
if entity.entity_type.lower() not in ['task', 'assetversion']:
@@ -97,15 +98,11 @@ class Sync_To_Avalon(BaseAction):
'user': user,
'status': 'running',
'data': json.dumps({
- 'description': 'Synch Ftrack to Avalon.'
+ 'description': 'Sync Ftrack to Avalon.'
})
})
-
+ session.commit()
try:
- self.log.info(
- "Action <" + self.__class__.__name__ + "> is running"
- )
-
self.importable = []
# get from top entity in hierarchy all parent entities
@@ -152,26 +149,11 @@ class Sync_To_Avalon(BaseAction):
)
if 'errors' in result and len(result['errors']) > 0:
- items = []
- for error in result['errors']:
- for key, message in error.items():
- name = key.lower().replace(' ', '')
- info = {
- 'label': key,
- 'type': 'textarea',
- 'name': name,
- 'value': message
- }
- items.append(info)
- self.log.error(
- '{}: {}'.format(key, message)
- )
- title = 'Hey You! Few Errors were raised! (*look below*)'
-
job['status'] = 'failed'
session.commit()
- self.show_interface(event, items, title)
+ lib.show_errors(self, event, result['errors'])
+
return {
'success': False,
'message': "Sync to avalon FAILED"
@@ -183,7 +165,6 @@ class Sync_To_Avalon(BaseAction):
job['status'] = 'done'
session.commit()
- self.log.info('Synchronization to Avalon was successfull!')
except ValueError as ve:
job['status'] = 'failed'
@@ -239,8 +220,7 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- action_handler = Sync_To_Avalon(session)
- action_handler.register()
+ Sync_To_Avalon(session).register()
def main(arguments=None):
diff --git a/pype/ftrack/events/event_del_avalon_id_from_new.py b/pype/ftrack/events/event_del_avalon_id_from_new.py
index 43812675e5..7659191637 100644
--- a/pype/ftrack/events/event_del_avalon_id_from_new.py
+++ b/pype/ftrack/events/event_del_avalon_id_from_new.py
@@ -13,25 +13,25 @@ class DelAvalonIdFromNew(BaseEvent):
'''
priority = Sync_to_Avalon.priority - 1
- def launch(self, event):
+ def launch(self, session, event):
created = []
entities = event['data']['entities']
for entity in entities:
try:
entity_id = entity['entityId']
- if entity['action'] == 'add':
+ if entity.get('action', None) == 'add':
id_dict = entity['changes']['id']
if id_dict['new'] is not None and id_dict['old'] is None:
created.append(id_dict['new'])
elif (
- entity['action'] == 'update' and
+ entity.get('action', None) == 'update' and
get_ca_mongoid() in entity['keys'] and
entity_id in created
):
- ftrack_entity = self.session.get(
+ ftrack_entity = session.get(
self._get_entity_type(entity),
entity_id
)
@@ -44,19 +44,12 @@ class DelAvalonIdFromNew(BaseEvent):
ftrack_entity['custom_attributes'][
get_ca_mongoid()
] = ''
- self.session.commit()
+ session.commit()
except Exception:
+ session.rollback()
continue
- def register(self):
- '''Registers the event, subscribing the discover and launch topics.'''
- self.session.event_hub.subscribe(
- 'topic=ftrack.update',
- self.launch,
- priority=self.priority
- )
-
def register(session, **kw):
'''Register plugin. Called when used as an plugin.'''
diff --git a/pype/ftrack/events/event_next_task_update.py b/pype/ftrack/events/event_next_task_update.py
index 3391b3516f..e677e53fb2 100644
--- a/pype/ftrack/events/event_next_task_update.py
+++ b/pype/ftrack/events/event_next_task_update.py
@@ -34,49 +34,56 @@ class NextTaskUpdate(BaseEvent):
return None
- def launch(self, session, entities, event):
+ def launch(self, session, event):
'''Propagates status from version to task when changed'''
# self.log.info(event)
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
+ changes = entity.get('changes', None)
+ if changes is None:
+ continue
+ statusid_changes = changes.get('statusid', {})
+ if (
+ entity['entityType'] != 'task' or
+ 'statusid' not in entity['keys'] or
+ statusid_changes.get('new', None) is None or
+ statusid_changes.get('old', None) is None
+ ):
+ continue
- if (entity['entityType'] == 'task' and
- 'statusid' in entity['keys']):
+ task = session.get('Task', entity['entityId'])
- task = session.get('Task', entity['entityId'])
+ status = session.get('Status',
+ entity['changes']['statusid']['new'])
+ state = status['state']['name']
- status = session.get('Status',
- entity['changes']['statusid']['new'])
- state = status['state']['name']
+ next_task = self.get_next_task(task, session)
- next_task = self.get_next_task(task, session)
+ # Setting next task to Ready, if on NOT READY
+ if next_task and state == 'Done':
+ if next_task['status']['name'].lower() == 'not ready':
- # Setting next task to Ready, if on NOT READY
- if next_task and state == 'Done':
- if next_task['status']['name'].lower() == 'not ready':
+ # Get path to task
+ path = task['name']
+ for p in task['ancestors']:
+ path = p['name'] + '/' + path
- # Get path to task
- path = task['name']
- for p in task['ancestors']:
- path = p['name'] + '/' + path
-
- # Setting next task status
- try:
- query = 'Status where name is "{}"'.format('Ready')
- status_to_set = session.query(query).one()
- next_task['status'] = status_to_set
- except Exception as e:
- self.log.warning((
- '!!! [ {} ] status couldnt be set: [ {} ]'
- ).format(path, e))
- else:
- self.log.info((
- '>>> [ {} ] updated to [ Ready ]'
- ).format(path))
-
- session.commit()
+ # Setting next task status
+ try:
+ query = 'Status where name is "{}"'.format('Ready')
+ status_to_set = session.query(query).one()
+ next_task['status'] = status_to_set
+ session.commit()
+ self.log.info((
+ '>>> [ {} ] updated to [ Ready ]'
+ ).format(path))
+ except Exception as e:
+ self.log.warning((
+ '!!! [ {} ] status couldnt be set: [ {} ]'
+ ).format(path, e))
+ session.rollback()
def register(session, **kw):
@@ -84,5 +91,4 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- event = NextTaskUpdate(session)
- event.register()
+ NextTaskUpdate(session).register()
diff --git a/pype/ftrack/events/event_radio_buttons.py b/pype/ftrack/events/event_radio_buttons.py
index 6d06326365..f96d90307d 100644
--- a/pype/ftrack/events/event_radio_buttons.py
+++ b/pype/ftrack/events/event_radio_buttons.py
@@ -2,9 +2,12 @@ import ftrack_api
from pype.ftrack import BaseEvent
+ignore_me = True
+
+
class Radio_buttons(BaseEvent):
- def launch(self, session, entities, event):
+ def launch(self, session, event):
'''Provides a readio button behaviour to any bolean attribute in
radio_button group.'''
@@ -31,7 +34,6 @@ class Radio_buttons(BaseEvent):
session.commit()
-
def register(session):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
diff --git a/pype/ftrack/events/event_sync_to_avalon.py b/pype/ftrack/events/event_sync_to_avalon.py
index 994c58637d..1deaa3d17e 100644
--- a/pype/ftrack/events/event_sync_to_avalon.py
+++ b/pype/ftrack/events/event_sync_to_avalon.py
@@ -4,8 +4,12 @@ from pype.ftrack import BaseEvent, lib
class Sync_to_Avalon(BaseEvent):
- def launch(self, session, entities, event):
+ ignore_entityType = [
+ 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
+ 'socialfeed', 'socialnotification', 'timelog'
+ ]
+ def launch(self, session, event):
ca_mongoid = lib.get_ca_mongoid()
# If mongo_id textfield has changed: RETURN!
# - infinite loop
@@ -14,6 +18,7 @@ class Sync_to_Avalon(BaseEvent):
if ca_mongoid in ent['keys']:
return
+ entities = self._get_entities(session, event, self.ignore_entityType)
ft_project = None
# get project
for entity in entities:
@@ -84,23 +89,9 @@ class Sync_to_Avalon(BaseEvent):
custom_attributes=custom_attributes
)
if 'errors' in result and len(result['errors']) > 0:
- items = []
- for error in result['errors']:
- for key, message in error.items():
- name = key.lower().replace(' ', '')
- info = {
- 'label': key,
- 'type': 'textarea',
- 'name': name,
- 'value': message
- }
- items.append(info)
- self.log.error(
- '{}: {}'.format(key, message)
- )
session.commit()
- title = 'Hey You! You raised few Errors! (*look below*)'
- self.show_interface(event, items, title)
+ lib.show_errors(self, event, result['errors'])
+
return
if avalon_project is None:
@@ -109,56 +100,21 @@ class Sync_to_Avalon(BaseEvent):
except Exception as e:
message = str(e)
+ title = 'Hey You! Unknown Error has been raised! (*look below*)'
ftrack_message = (
'SyncToAvalon event ended with unexpected error'
- ' please check log file for more information.'
+ ' please check log file or contact Administrator'
+ ' for more information.'
)
- items = [{
- 'label': 'Fatal Error',
- 'type': 'textarea',
- 'name': 'error',
- 'value': ftrack_message
- }]
- title = 'Hey You! Unknown Error has been raised! (*look below*)'
+ items = [
+ {'type': 'label', 'value':'# Fatal Error'},
+ {'type': 'label',
+ 'value': '{}'.format(ftrack_message)}
+ ]
self.show_interface(event, items, title)
- self.log.error(message)
+ self.log.error('Fatal error during sync: {}'.format(message))
return
- def _launch(self, event):
- self.session.reset()
-
- args = self._translate_event(
- self.session, event
- )
-
- self.launch(
- self.session, *args
- )
- return
-
- def _translate_event(self, session, event):
- exceptions = [
- 'assetversion', 'job', 'user', 'reviewsessionobject', 'timer',
- 'socialfeed', 'timelog'
- ]
- _selection = event['data'].get('entities', [])
-
- _entities = list()
- for entity in _selection:
- if entity['entityType'] in exceptions:
- continue
- _entities.append(
- (
- session.get(
- self._get_entity_type(entity),
- entity.get('entityId')
- )
- )
- )
-
- return [_entities, event]
-
def register(session, **kw):
'''Register plugin. Called when used as an plugin.'''
@@ -166,5 +122,4 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- event = Sync_to_Avalon(session)
- event.register()
+ Sync_to_Avalon(session).register()
diff --git a/pype/ftrack/events/event_test.py b/pype/ftrack/events/event_test.py
index 43c805119e..46e16cbb95 100644
--- a/pype/ftrack/events/event_test.py
+++ b/pype/ftrack/events/event_test.py
@@ -13,11 +13,11 @@ class Test_Event(BaseEvent):
priority = 10000
- def launch(self, session, entities, event):
+ def launch(self, session, event):
'''just a testing event'''
- self.log.info(event)
+ # self.log.info(event)
return True
@@ -27,5 +27,4 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- event = Test_Event(session)
- event.register()
+ Test_Event(session).register()
diff --git a/pype/ftrack/events/event_thumbnail_updates.py b/pype/ftrack/events/event_thumbnail_updates.py
index b6e0b12d56..50089e26b8 100644
--- a/pype/ftrack/events/event_thumbnail_updates.py
+++ b/pype/ftrack/events/event_thumbnail_updates.py
@@ -4,7 +4,7 @@ from pype.ftrack import BaseEvent
class ThumbnailEvents(BaseEvent):
- def launch(self, session, entities, event):
+ def launch(self, session, event):
'''just a testing event'''
# self.log.info(event)
@@ -23,8 +23,12 @@ class ThumbnailEvents(BaseEvent):
parent['name'], task['name']))
# Update task thumbnail from published version
- if (entity['entityType'] == 'assetversion' and
- entity['action'] == 'encoded'):
+ # if (entity['entityType'] == 'assetversion' and
+ # entity['action'] == 'encoded'):
+ if (
+ entity['entityType'] == 'assetversion'
+ and 'thumbid' in entity['keys']
+ ):
version = session.get('AssetVersion', entity['entityId'])
thumbnail = version.get('thumbnail')
@@ -38,11 +42,12 @@ class ThumbnailEvents(BaseEvent):
session.commit()
+ pass
+
def register(session, **kw):
'''Register plugin. Called when used as an plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
- event = ThumbnailEvents(session)
- event.register()
+ ThumbnailEvents(session).register()
diff --git a/pype/ftrack/events/event_version_to_task_statuses.py b/pype/ftrack/events/event_version_to_task_statuses.py
index e91664a6fe..d1393e622e 100644
--- a/pype/ftrack/events/event_version_to_task_statuses.py
+++ b/pype/ftrack/events/event_version_to_task_statuses.py
@@ -4,20 +4,25 @@ from pype.ftrack import BaseEvent
class VersionToTaskStatus(BaseEvent):
- def launch(self, session, entities, event):
+ def launch(self, session, event):
'''Propagates status from version to task when changed'''
+ session.commit()
- # self.log.info(event)
# start of event procedure ----------------------------------
for entity in event['data'].get('entities', []):
# Filter non-assetversions
- if (entity['entityType'] == 'assetversion' and
- 'statusid' in entity['keys']):
+ if (
+ entity['entityType'] == 'assetversion' and
+ 'statusid' in entity.get('keys', [])
+ ):
version = session.get('AssetVersion', entity['entityId'])
- version_status = session.get(
- 'Status', entity['changes']['statusid']['new']
- )
+ try:
+ version_status = session.get(
+ 'Status', entity['changes']['statusid']['new']
+ )
+ except Exception:
+ continue
task_status = version_status
task = version['task']
self.log.info('>>> version status: [ {} ]'.format(
@@ -36,10 +41,17 @@ class VersionToTaskStatus(BaseEvent):
if status_to_set is not None:
query = 'Status where name is "{}"'.format(status_to_set)
- task_status = session.query(query).one()
+ try:
+ task_status = session.query(query).one()
+ except Exception:
+ self.log.info(
+ '!!! status was not found in Ftrack [ {} ]'.format(
+ status_to_set
+ ))
+ continue
# Proceed if the task status was set
- if task_status:
+ if task_status is not None:
# Get path to task
path = task['name']
for p in task['ancestors']:
@@ -62,5 +74,4 @@ def register(session, **kw):
if not isinstance(session, ftrack_api.session.Session):
return
- event = VersionToTaskStatus(session)
- event.register()
+ VersionToTaskStatus(session).register()
diff --git a/pype/ftrack/ftrack_run.py b/pype/ftrack/ftrack_run.py
index a2214e7912..a722f8d3fe 100644
--- a/pype/ftrack/ftrack_run.py
+++ b/pype/ftrack/ftrack_run.py
@@ -9,7 +9,7 @@ from app.vendor.Qt import QtCore, QtGui, QtWidgets
from pype.ftrack import credentials, login_dialog as login_dialog
from pype.vendor.pynput import mouse, keyboard
-from FtrackServer import FtrackServer
+from . import FtrackServer
from pype import api as pype
diff --git a/pype/ftrack/ftrack_server/__init__.py b/pype/ftrack/ftrack_server/__init__.py
new file mode 100644
index 0000000000..b7f8651da0
--- /dev/null
+++ b/pype/ftrack/ftrack_server/__init__.py
@@ -0,0 +1,8 @@
+from .ftrack_server import FtrackServer
+from . import event_server, event_server_cli
+
+__all__ = [
+ 'event_server',
+ 'event_server_cli',
+ 'FtrackServer'
+]
diff --git a/pype/ftrack/event_server.py b/pype/ftrack/ftrack_server/event_server.py
similarity index 80%
rename from pype/ftrack/event_server.py
rename to pype/ftrack/ftrack_server/event_server.py
index 9c6207d6a2..e824d1d899 100644
--- a/pype/ftrack/event_server.py
+++ b/pype/ftrack/ftrack_server/event_server.py
@@ -1,6 +1,6 @@
import sys
from pype.ftrack import credentials, login_dialog as login_dialog
-from FtrackServer import FtrackServer
+from pype.ftrack.ftrack_server import FtrackServer
from app.vendor.Qt import QtWidgets
from pype import api
@@ -9,10 +9,12 @@ log = api.Logger.getLogger(__name__, "ftrack-event-server")
class EventServer:
def __init__(self):
- self.login_widget = login_dialog.Login_Dialog_ui(self)
+ self.login_widget = login_dialog.Login_Dialog_ui(
+ parent=self, is_event=True
+ )
self.event_server = FtrackServer('event')
- cred = credentials._get_credentials()
+ cred = credentials._get_credentials(True)
if 'username' in cred and 'apiKey' in cred:
self.login_widget.user_input.setText(cred['username'])
@@ -24,6 +26,7 @@ class EventServer:
def loginChange(self):
log.info("Logged successfully")
+
self.login_widget.close()
self.event_server.run_server()
diff --git a/pype/ftrack/ftrack_server/event_server_cli.py b/pype/ftrack/ftrack_server/event_server_cli.py
new file mode 100644
index 0000000000..a466bf5723
--- /dev/null
+++ b/pype/ftrack/ftrack_server/event_server_cli.py
@@ -0,0 +1,114 @@
+import sys
+from pype.ftrack import credentials
+from pype.ftrack.ftrack_server import FtrackServer
+from app import api
+
+log = api.Logger.getLogger(__name__, "ftrack-event-server-cli")
+
+possible_yes = ['y', 'yes']
+possible_no = ['n', 'no']
+possible_third = ['a', 'auto']
+possible_exit = ['exit']
+
+
+def ask_yes_no(third=False):
+ msg = "Y/N:"
+ if third:
+ msg = "Y/N/AUTO:"
+ log.info(msg)
+ response = input().lower()
+ if response in possible_exit:
+ sys.exit()
+ elif response in possible_yes:
+ return True
+ elif response in possible_no:
+ return False
+ else:
+        all_entries = list(possible_no)
+ all_entries.extend(possible_yes)
+ if third is True:
+ if response in possible_third:
+ return 'auto'
+ else:
+ all_entries.extend(possible_third)
+ all_entries.extend(possible_exit)
+ all_entries = ', '.join(all_entries)
+ log.info(
+            'Invalid input. Possible entries: [{}]. Try it again:'.format(
+ all_entries
+ )
+ )
+        return ask_yes_no(third)
+
+
+def cli_login():
+ enter_cred = True
+ cred_data = credentials._get_credentials(True)
+
+ user = cred_data.get('username', None)
+ key = cred_data.get('apiKey', None)
+ auto = cred_data.get('auto_connect', False)
+ if user is None or key is None:
+ log.info(
+ 'Credentials are not set. Do you want to enter them now? (Y/N)'
+ )
+ if ask_yes_no() is False:
+ log.info("Exiting...")
+ return
+ elif credentials._check_credentials(user, key):
+ if auto is False:
+ log.info((
+ 'Do you want to log with username {}'
+ ' enter "auto" if want to autoconnect next time (Y/N/AUTO)'
+ ).format(
+ user
+ ))
+ result = ask_yes_no(True)
+ if result is True:
+ enter_cred = False
+ elif result == 'auto':
+ credentials._save_credentials(user, key, True, True)
+ enter_cred = False
+ else:
+ enter_cred = False
+ else:
+ log.info(
+ 'Stored credentials are not valid.'
+ ' Do you want enter them now?(Y/N)'
+ )
+ if ask_yes_no() is False:
+ log.info("Exiting...")
+ return
+
+ while enter_cred:
+ log.info('Please enter Ftrack API User:')
+ user = input()
+ log.info('And now enter Ftrack API Key:')
+ key = input()
+ if credentials._check_credentials(user, key):
+ log.info(
+ 'Credentials are valid.'
+ ' Do you want to auto-connect next time?(Y/N)'
+ )
+ credentials._save_credentials(user, key, True, ask_yes_no())
+ enter_cred = False
+ break
+ else:
+ log.info(
+ 'Entered credentials are not valid.'
+ ' Do you want to try it again?(Y/N)'
+ )
+ if ask_yes_no() is False:
+ log.info('Exiting...')
+ return
+
+ server = FtrackServer('event')
+ server.run_server()
+
+
+def main():
+ cli_login()
+
+
+if (__name__ == ('__main__')):
+ main()
diff --git a/pype/ftrack/ftrack_server/ftrack_server.py b/pype/ftrack/ftrack_server/ftrack_server.py
new file mode 100644
index 0000000000..6c63dcf414
--- /dev/null
+++ b/pype/ftrack/ftrack_server/ftrack_server.py
@@ -0,0 +1,160 @@
+import os
+import sys
+import types
+import importlib
+import ftrack_api
+import time
+import logging
+from app.api import Logger
+
+log = Logger.getLogger(__name__)
+
+"""
+# Required - Needed for connection to Ftrack
+FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com"
+FTRACK_API_KEY # Ftrack user's API key "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+FTRACK_API_USER # Ftrack username e.g. "user.name"
+
+# Required - Paths to folder with actions
+FTRACK_ACTIONS_PATH # Paths to folders where are located actions
+ - EXAMPLE: "M:/FtrackApi/../actions/"
+FTRACK_EVENTS_PATH # Paths to folders where are located actions
+ - EXAMPLE: "M:/FtrackApi/../events/"
+
+# Required - Needed for import included modules
+PYTHONPATH # Path to ftrack_api and paths to all modules used in actions
+ - path to ftrack_action_handler, etc.
+"""
+
+
+class FtrackServer():
+ def __init__(self, type='action'):
+ """
+ - 'type' is by default set to 'action' - Runs Action server
+ - enter 'event' for Event server
+
+ EXAMPLE FOR EVENT SERVER:
+ ...
+ server = FtrackServer('event')
+ server.run_server()
+ ..
+ """
+ # set Ftrack logging to Warning only - OPTIONAL
+ ftrack_log = logging.getLogger("ftrack_api")
+ ftrack_log.setLevel(logging.WARNING)
+
+ self.type = type
+ self.actionsAvailable = True
+ self.eventsAvailable = True
+ # Separate all paths
+ if "FTRACK_ACTIONS_PATH" in os.environ:
+ all_action_paths = os.environ["FTRACK_ACTIONS_PATH"]
+ self.actionsPaths = all_action_paths.split(os.pathsep)
+ else:
+ self.actionsAvailable = False
+
+ if "FTRACK_EVENTS_PATH" in os.environ:
+ all_event_paths = os.environ["FTRACK_EVENTS_PATH"]
+ self.eventsPaths = all_event_paths.split(os.pathsep)
+ else:
+ self.eventsAvailable = False
+
+ def stop_session(self):
+ if self.session.event_hub.connected is True:
+ self.session.event_hub.disconnect()
+ self.session.close()
+ self.session = None
+
+ def set_files(self, paths):
+ # Iterate all paths
+ functions = []
+ for path in paths:
+ # add path to PYTHON PATH
+ if path not in sys.path:
+ sys.path.append(path)
+
+ # Get all modules with functions
+ for file in os.listdir(path):
+ # Get only .py files with action functions
+ try:
+ if '.pyc' in file or '.py' not in file:
+ continue
+
+ ignore = 'ignore_me'
+ mod = importlib.import_module(os.path.splitext(file)[0])
+ importlib.reload(mod)
+ mod_functions = dict(
+ [
+ (name, function)
+ for name, function in mod.__dict__.items()
+ if isinstance(function, types.FunctionType) or
+ name == ignore
+ ]
+ )
+ # Don't care about ignore_me files
+ if (
+ ignore in mod_functions and
+ mod_functions[ignore] is True
+ ):
+ continue
+ # separate files by register function
+ if 'register' not in mod_functions:
+ msg = (
+ '"{0}" - Missing register method'
+ ).format(file, self.type)
+ log.warning(msg)
+ continue
+
+ functions.append({
+ 'name': file,
+ 'register': mod_functions['register']
+ })
+ except Exception as e:
+ msg = 'Loading of file "{}" failed ({})'.format(
+ file, str(e)
+ )
+ log.warning(msg)
+
+ if len(functions) < 1:
+ raise Exception
+
+ function_counter = 0
+ for function in functions:
+ try:
+ function['register'](self.session)
+ if function_counter%7 == 0:
+ time.sleep(0.1)
+ function_counter += 1
+ except Exception as e:
+ msg = '"{}" - register was not successful ({})'.format(
+ function['name'], str(e)
+ )
+ log.warning(msg)
+
+ def run_server(self):
+ self.session = ftrack_api.Session(auto_connect_event_hub=True,)
+
+ if self.type.lower() == 'event':
+ if self.eventsAvailable is False:
+ msg = (
+ 'FTRACK_EVENTS_PATH is not set'
+ ', event server won\'t launch'
+ )
+ log.error(msg)
+ return
+ self.set_files(self.eventsPaths)
+ else:
+ if self.actionsAvailable is False:
+ msg = (
+ 'FTRACK_ACTIONS_PATH is not set'
+ ', action server won\'t launch'
+ )
+ log.error(msg)
+ return
+ self.set_files(self.actionsPaths)
+
+ log.info(60*"*")
+ log.info('Registration of actions/events has finished!')
+
+ # keep event_hub on session running
+ self.session.event_hub.wait()
diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py
index 4b058fa3c3..7ebd85d71d 100644
--- a/pype/ftrack/lib/avalon_sync.py
+++ b/pype/ftrack/lib/avalon_sync.py
@@ -457,12 +457,17 @@ def get_avalon_project(ft_project):
def get_project_config(entity):
config = {}
config['schema'] = pypelib.get_avalon_project_config_schema()
- config['tasks'] = [{'name': ''}]
+ config['tasks'] = get_tasks(entity)
config['apps'] = get_project_apps(entity)
config['template'] = pypelib.get_avalon_project_template()
return config
+def get_tasks(project):
+ return [
+ {'name': task_type['name']} for task_type in project[
+ 'project_schema']['_task_type_schema']['types']
+ ]
def get_project_apps(entity):
""" Get apps from project
@@ -536,3 +541,26 @@ def get_config_data():
log.warning("{} - {}".format(msg, str(e)))
return data
+
+def show_errors(obj, event, errors):
+ title = 'Hey You! You raised few Errors! (*look below*)'
+ items = []
+ splitter = {'type': 'label', 'value': '---'}
+ for error in errors:
+ for key, message in error.items():
+ error_title = {
+ 'type': 'label',
+ 'value': '# {}'.format(key)
+ }
+ error_message = {
+ 'type': 'label',
+                'value': '{}\n'.format(message)
+ }
+ if len(items) > 0:
+ items.append(splitter)
+ items.append(error_title)
+ items.append(error_message)
+ obj.log.error(
+ '{}: {}'.format(key, message)
+ )
+ obj.show_interface(event, items, title)
diff --git a/pype/ftrack/lib/ftrack_action_handler.py b/pype/ftrack/lib/ftrack_action_handler.py
index a02a4da5e5..c6d6181c1f 100644
--- a/pype/ftrack/lib/ftrack_action_handler.py
+++ b/pype/ftrack/lib/ftrack_action_handler.py
@@ -62,7 +62,6 @@ class BaseAction(BaseHandler):
)
def _launch(self, event):
- self.reset_session()
args = self._translate_event(
self.session, event
)
@@ -85,14 +84,20 @@ class BaseAction(BaseHandler):
def _handle_result(self, session, result, entities, event):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
- result = {
- 'success': result,
- 'message': (
- '{0} launched successfully.'.format(
- self.label
+ if result is True:
+ result = {
+ 'success': result,
+ 'message': (
+ '{0} launched successfully.'.format(self.label)
)
- )
- }
+ }
+ else:
+ result = {
+ 'success': result,
+ 'message': (
+ '{0} launch failed.'.format(self.label)
+ )
+ }
elif isinstance(result, dict):
if 'items' in result:
diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py
index cb25862883..b5c8ec3a7a 100644
--- a/pype/ftrack/lib/ftrack_app_handler.py
+++ b/pype/ftrack/lib/ftrack_app_handler.py
@@ -1,5 +1,3 @@
-# :coding: utf-8
-# :copyright: Copyright (c) 2017 ftrack
import os
import sys
import platform
@@ -94,17 +92,13 @@ class AppAction(BaseHandler):
'''
- entity = entities[0]
-
- # TODO Should return False if not TASK ?!!!
- # TODO Should return False if more than one entity is selected ?!!!
if (
- len(entities) > 1 or
- entity.entity_type.lower() != 'task'
+ len(entities) != 1 or
+ entities[0].entity_type.lower() != 'task'
):
return False
- ft_project = entity['project']
+ ft_project = entities[0]['project']
database = pypelib.get_avalon_database()
project_name = ft_project['full_name']
@@ -115,9 +109,9 @@ class AppAction(BaseHandler):
if avalon_project is None:
return False
else:
- apps = []
- for app in avalon_project['config']['apps']:
- apps.append(app['name'])
+ apps = [app['name'] for app in avalon_project['config'].get(
+ 'apps', []
+ )]
if self.identifier not in apps:
return False
@@ -243,13 +237,28 @@ class AppAction(BaseHandler):
'''
- self.log.info((
- "Action - {0} ({1}) - just started"
- ).format(self.label, self.identifier))
-
entity = entities[0]
project_name = entity['project']['full_name']
+ # Validate Clockify settings if Clockify is required
+ clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
+ if clockify_timer is not None:
+ from pype.clockify import ClockifyAPI
+ clockapi = ClockifyAPI()
+ if clockapi.verify_api() is False:
+ title = 'Launch message'
+ header = '# You Can\'t launch **any Application**'
+ message = (
+ 'You don\'t have set Clockify API'
+                    ' key in Clockify settings\n'
+ )
+ items = [
+ {'type': 'label', 'value': header},
+ {'type': 'label', 'value': message}
+ ]
+ self.show_interface(event, items, title)
+ return False
+
database = pypelib.get_avalon_database()
# Get current environments
@@ -397,6 +406,31 @@ class AppAction(BaseHandler):
self.log.info('Starting timer for task: ' + task['name'])
user.start_timer(task, force=True)
+ # RUN TIMER IN Clockify
+ if clockify_timer is not None:
+ task_type = task['type']['name']
+ project_name = task['project']['full_name']
+
+ def get_parents(entity):
+ output = []
+ if entity.entity_type.lower() == 'project':
+ return output
+ output.extend(get_parents(entity['parent']))
+ output.append(entity['name'])
+
+ return output
+
+ desc_items = get_parents(task['parent'])
+ desc_items.append(task['name'])
+ description = '/'.join(desc_items)
+
+ project_id = clockapi.get_project_id(project_name)
+ tag_ids = []
+ tag_ids.append(clockapi.get_tag_id(task_type))
+ clockapi.start_time_entry(
+ description, project_id, tag_ids=tag_ids
+ )
+
# Change status of task to In progress
config = get_config_data()
diff --git a/pype/ftrack/lib/ftrack_base_handler.py b/pype/ftrack/lib/ftrack_base_handler.py
index 1bce7e3926..6263cf1f66 100644
--- a/pype/ftrack/lib/ftrack_base_handler.py
+++ b/pype/ftrack/lib/ftrack_base_handler.py
@@ -4,6 +4,13 @@ import time
from pype import api as pype
+class MissingPermision(Exception):
+ def __init__(self, message=None):
+ if message is None:
+ message = 'Ftrack'
+ super().__init__(message)
+
+
class BaseHandler(object):
'''Custom Action base class
@@ -25,10 +32,11 @@ class BaseHandler(object):
self.log = pype.Logger.getLogger(self.__class__.__name__)
# Using decorator
- self.register = self.register_log(self.register)
+ self.register = self.register_decorator(self.register)
+ self.launch = self.launch_log(self.launch)
# Decorator
- def register_log(self, func):
+ def register_decorator(self, func):
@functools.wraps(func)
def wrapper_register(*args, **kwargs):
label = self.__class__.__name__
@@ -37,8 +45,20 @@ class BaseHandler(object):
label = self.label
else:
label = '{} {}'.format(self.label, self.variant)
-
try:
+ if hasattr(self, "role_list") and len(self.role_list) > 0:
+ username = self.session.api_user
+ user = self.session.query(
+ 'User where username is "{}"'.format(username)
+ ).one()
+ available = False
+ for role in user['user_security_roles']:
+ if role['security_role']['name'] in self.role_list:
+ available = True
+ break
+ if available is False:
+ raise MissingPermision
+
start_time = time.perf_counter()
func(*args, **kwargs)
end_time = time.perf_counter()
@@ -46,6 +66,14 @@ class BaseHandler(object):
self.log.info((
'{} "{}" - Registered successfully ({:.4f}sec)'
).format(self.type, label, run_time))
+ except MissingPermision as MPE:
+ self.log.info((
+ '!{} "{}" - You\'re missing required {} permissions'
+ ).format(self.type, label, str(MPE)))
+ except AssertionError as ae:
+ self.log.info((
+ '!{} "{}" - {}'
+ ).format(self.type, label, str(ae)))
except NotImplementedError:
self.log.error((
'{} "{}" - Register method is not implemented'
@@ -58,6 +86,31 @@ class BaseHandler(object):
)
return wrapper_register
+ # Decorator
+ def launch_log(self, func):
+ @functools.wraps(func)
+ def wrapper_launch(*args, **kwargs):
+ label = self.__class__.__name__
+ if hasattr(self, 'label'):
+ if self.variant is None:
+ label = self.label
+ else:
+ label = '{} {}'.format(self.label, self.variant)
+
+ try:
+ self.log.info(('{} "{}": Launched').format(self.type, label))
+ result = func(*args, **kwargs)
+ self.log.info(('{} "{}": Finished').format(self.type, label))
+ return result
+ except Exception as e:
+ msg = '{} "{}": Failed ({})'.format(self.type, label, str(e))
+ self.log.error(msg)
+ return {
+ 'success': False,
+ 'message': msg
+ }
+ return wrapper_launch
+
@property
def session(self):
'''Return current session.'''
@@ -75,6 +128,16 @@ class BaseHandler(object):
raise NotImplementedError()
def _discover(self, event):
+ items = {
+ 'items': [{
+ 'label': self.label,
+ 'variant': self.variant,
+ 'description': self.description,
+ 'actionIdentifier': self.identifier,
+ 'icon': self.icon,
+ }]
+ }
+
args = self._translate_event(
self.session, event
)
@@ -83,18 +146,10 @@ class BaseHandler(object):
self.session, *args
)
- if accepts:
+ if accepts is True:
self.log.debug(u'Discovering action with selection: {0}'.format(
- args[1]['data'].get('selection', [])))
- return {
- 'items': [{
- 'label': self.label,
- 'variant': self.variant,
- 'description': self.description,
- 'actionIdentifier': self.identifier,
- 'icon': self.icon,
- }]
- }
+ event['data'].get('selection', [])))
+ return items
def discover(self, session, entities, event):
'''Return true if we can handle the selected entities.
@@ -118,25 +173,32 @@ class BaseHandler(object):
'''Return *event* translated structure to be used with the API.'''
'''Return *event* translated structure to be used with the API.'''
-
- _selection = event['data'].get('selection', [])
-
- _entities = list()
- for entity in _selection:
- _entities.append(
- (
- session.get(
- self._get_entity_type(entity),
- entity.get('entityId')
- )
- )
- )
+ _entities = event['data'].get('entities_object', None)
+ if (
+ _entities is None or
+ _entities[0].get('link', None) == ftrack_api.symbol.NOT_SET
+ ):
+ _entities = self._get_entities(event)
return [
_entities,
event
]
+ def _get_entities(self, event):
+ self.session._local_cache.clear()
+ selection = event['data'].get('selection', [])
+ _entities = []
+ for entity in selection:
+ _entities.append(
+ self.session.get(
+ self._get_entity_type(entity),
+ entity.get('entityId')
+ )
+ )
+ event['data']['entities_object'] = _entities
+ return _entities
+
def _get_entity_type(self, entity):
'''Return translated entity type tht can be used with API.'''
# Get entity type and make sure it is lower cased. Most places except
@@ -204,7 +266,10 @@ class BaseHandler(object):
def _interface(self, *args):
interface = self.interface(*args)
if interface:
- if 'items' in interface:
+ if (
+ 'items' in interface or
+ ('success' in interface and 'message' in interface)
+ ):
return interface
return {
@@ -229,23 +294,31 @@ class BaseHandler(object):
def _handle_result(self, session, result, entities, event):
'''Validate the returned result from the action callback'''
if isinstance(result, bool):
- result = {
- 'success': result,
- 'message': (
- '{0} launched successfully.'.format(
- self.label
+ if result is True:
+ result = {
+ 'success': result,
+ 'message': (
+ '{0} launched successfully.'.format(self.label)
)
- )
- }
+ }
+ else:
+ result = {
+ 'success': result,
+ 'message': (
+ '{0} launch failed.'.format(self.label)
+ )
+ }
elif isinstance(result, dict):
- for key in ('success', 'message'):
- if key in result:
- continue
+ items = 'items' in result
+ if items is False:
+ for key in ('success', 'message'):
+ if key in result:
+ continue
- raise KeyError(
- 'Missing required key: {0}.'.format(key)
- )
+ raise KeyError(
+ 'Missing required key: {0}.'.format(key)
+ )
else:
self.log.error(
diff --git a/pype/ftrack/lib/ftrack_event_handler.py b/pype/ftrack/lib/ftrack_event_handler.py
index 2cbc3782b8..c6c91e7428 100644
--- a/pype/ftrack/lib/ftrack_event_handler.py
+++ b/pype/ftrack/lib/ftrack_event_handler.py
@@ -1,3 +1,4 @@
+import functools
from .ftrack_base_handler import BaseHandler
@@ -18,6 +19,18 @@ class BaseEvent(BaseHandler):
'''Expects a ftrack_api.Session instance'''
super().__init__(session)
+ # Decorator
+ def launch_log(self, func):
+ @functools.wraps(func)
+ def wrapper_launch(*args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except Exception as e:
+ self.log.info('{} Failed ({})'.format(
+ self.__class__.__name__, str(e))
+ )
+ return wrapper_launch
+
def register(self):
'''Registers the event, subscribing the discover and launch topics.'''
self.session.event_hub.subscribe(
@@ -27,23 +40,31 @@ class BaseEvent(BaseHandler):
)
def _launch(self, event):
- args = self._translate_event(
- self.session, event
- )
+ self.session.rollback()
+ self.session._local_cache.clear()
self.launch(
- self.session, *args
+ self.session, event
)
return
def _translate_event(self, session, event):
'''Return *event* translated structure to be used with the API.'''
- _selection = event['data'].get('entities', [])
+ return [
+ self._get_entities(session, event),
+ event
+ ]
+ def _get_entities(
+ self, session, event, ignore=['socialfeed', 'socialnotification']
+ ):
+ _selection = event['data'].get('entities', [])
_entities = list()
+ if isinstance(ignore, str):
+            ignore = [ignore]
for entity in _selection:
- if entity['entityType'] in ['socialfeed']:
+ if entity['entityType'] in ignore:
continue
_entities.append(
(
@@ -53,8 +74,4 @@ class BaseEvent(BaseHandler):
)
)
)
-
- return [
- _entities,
- event
- ]
+ return _entities
diff --git a/pype/ftrack/login_dialog.py b/pype/ftrack/login_dialog.py
index c4011b0169..2828afe539 100644
--- a/pype/ftrack/login_dialog.py
+++ b/pype/ftrack/login_dialog.py
@@ -16,11 +16,12 @@ class Login_Dialog_ui(QtWidgets.QWidget):
buttons = []
labels = []
- def __init__(self, parent=None):
+ def __init__(self, parent=None, is_event=False):
super(Login_Dialog_ui, self).__init__()
self.parent = parent
+ self.is_event = is_event
if hasattr(parent, 'icon'):
self.setWindowIcon(self.parent.icon)
@@ -205,7 +206,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
verification = credentials._check_credentials(username, apiKey)
if verification:
- credentials._save_credentials(username, apiKey)
+ credentials._save_credentials(username, apiKey, self.is_event)
credentials._set_env(username, apiKey)
if self.parent is not None:
self.parent.loginChange()
@@ -305,7 +306,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
verification = credentials._check_credentials(username, apiKey)
if verification is True:
- credentials._save_credentials(username, apiKey)
+ credentials._save_credentials(username, apiKey, self.is_event)
credentials._set_env(username, apiKey)
if self.parent is not None:
self.parent.loginChange()
diff --git a/pype/lib.py b/pype/lib.py
index 368ddad024..43461582db 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -206,6 +206,11 @@ def version_up(filepath):
new_label = label.replace(version, new_version, 1)
new_basename = _rreplace(basename, label, new_label)
+ if not new_basename.endswith(new_label):
+ index = (new_basename.find(new_label))
+ index += len(new_label)
+ new_basename = new_basename[:index]
+
new_filename = "{}{}".format(new_basename, ext)
new_filename = os.path.join(dirname, new_filename)
new_filename = os.path.normpath(new_filename)
@@ -214,9 +219,10 @@ def version_up(filepath):
raise RuntimeError("Created path is the same as current file,"
"this is a bug")
- if os.path.exists(new_filename):
- log.info("Skipping existing version %s" % new_label)
- return version_up(new_filename)
+ for file in os.listdir(dirname):
+ if file.endswith(ext) and file.startswith(new_basename):
+ log.info("Skipping existing version %s" % new_label)
+ return version_up(new_filename)
log.info("New version %s" % new_label)
return new_filename
diff --git a/pype/maya/customize.py b/pype/maya/customize.py
index 872942bfd9..61d7c283d2 100644
--- a/pype/maya/customize.py
+++ b/pype/maya/customize.py
@@ -78,6 +78,8 @@ def override_toolbox_ui():
import avalon.tools.cbsceneinventory as inventory
import avalon.tools.cbloader as loader
from avalon.maya.pipeline import launch_workfiles_app
+ import mayalookassigner
+
# Ensure the maya web icon on toolbox exists
web_button = "ToolBox|MainToolboxLayout|mayaWebButton"
@@ -98,6 +100,18 @@ def override_toolbox_ui():
background_color = (0.267, 0.267, 0.267)
controls = []
+ control = mc.iconTextButton(
+ "pype_toolbox_lookmanager",
+ annotation="Look Manager",
+ label="Look Manager",
+ image=os.path.join(icons, "lookmanager.png"),
+ command=lambda: mayalookassigner.show(),
+ bgc=background_color,
+ width=icon_size,
+ height=icon_size,
+ parent=parent)
+ controls.append(control)
+
control = mc.iconTextButton(
"pype_toolbox_workfiles",
annotation="Work Files",
diff --git a/pype/maya/menu.json b/pype/maya/menu.json
index 779109a169..e16bc7d4f4 100644
--- a/pype/maya/menu.json
+++ b/pype/maya/menu.json
@@ -19,22 +19,30 @@
"title": "# Project Manager",
"tooltip": "Add assets to the project"
},
+{
+ "type": "action",
+ "command": "from pype.tools.assetcreator import app as assetcreator; assetcreator.show(context='maya')",
+ "sourcetype": "python",
+ "title": "Asset Creator",
+ "tooltip": "Open the Asset Creator"
+},
{
"type": "separator"
},
{
"type": "menu",
- "title": "# Modeling",
+ "title": "Modeling",
"items": [
{
"type": "action",
- "command": "$PYPE_SCRIPTS\\modeling\\polyDeleteOtherUVSets.py",
- "sourcetype": "file",
+ "command": "import easyTreezSource; reload(easyTreezSource); easyTreezSource.easyTreez()",
+ "sourcetype": "python",
"tags": ["modeling",
- "polygon",
- "uvset",
- "delete"],
- "title": "# Polygon Delete Other UV Sets",
+ "trees",
+ "generate",
+ "create",
+ "plants"],
+ "title": "EasyTreez",
"tooltip": ""
},
{
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_api.py b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
index 9f03466af0..709a08de9c 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_api.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_api.py
@@ -1,5 +1,5 @@
import os
-
+import sys
import pyblish.api
import clique
@@ -26,15 +26,26 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
str: String query to use with "session.query"
"""
queries = []
- for key, value in data.iteritems():
- if not isinstance(value, (basestring, int)):
- self.log.info(value)
- if "id" in value.keys():
- queries.append(
- "{0}.id is \"{1}\"".format(key, value["id"])
- )
- else:
- queries.append("{0} is \"{1}\"".format(key, value))
+ if sys.version_info[0] < 3:
+ for key, value in data.iteritems():
+ if not isinstance(value, (basestring, int)):
+ self.log.info(value)
+ if "id" in value.keys():
+ queries.append(
+ "{0}.id is \"{1}\"".format(key, value["id"])
+ )
+ else:
+ queries.append("{0} is \"{1}\"".format(key, value))
+ else:
+ for key, value in data.items():
+ if not isinstance(value, (str, int)):
+ self.log.info(value)
+ if "id" in value.keys():
+ queries.append(
+ "{0}.id is \"{1}\"".format(key, value["id"])
+ )
+ else:
+ queries.append("{0} is \"{1}\"".format(key, value))
query = (
"select id from " + entitytype + " where " + " and ".join(queries)
diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
index 165d1b468d..d8e9e116f9 100644
--- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
+++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py
@@ -57,14 +57,20 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
self.log.debug('dest ext: ' + ext)
thumbnail = False
+
+
if ext in ['.mov']:
+ if not instance.data.get('startFrameReview'):
+ instance.data['startFrameReview'] = instance.data['startFrame']
+ if not instance.data.get('endFrameReview'):
+ instance.data['endFrameReview'] = instance.data['endFrame']
location = ft_session.query(
'Location where name is "ftrack.server"').one()
component_data = {
"name": "ftrackreview-mp4", # Default component name is "main".
"metadata": {'ftr_meta': json.dumps({
- 'frameIn': int(instance.data["startFrame"]),
- 'frameOut': int(instance.data["startFrame"]),
+ 'frameIn': int(instance.data['startFrameReview']),
+ 'frameOut': int(instance.data['startFrameReview']),
'frameRate': 25})}
}
elif ext in [".jpg", ".jpeg"]:
diff --git a/pype/plugins/fusion/publish/submit_deadline.py b/pype/plugins/fusion/publish/submit_deadline.py
index 6e1f405afd..30d17a4c69 100644
--- a/pype/plugins/fusion/publish/submit_deadline.py
+++ b/pype/plugins/fusion/publish/submit_deadline.py
@@ -12,7 +12,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit current Comp to Deadline
Renders are submitted to a Deadline Web Service as
- supplied via the environment variable AVALON_DEADLINE
+ supplied via the environment variable DEADLINE_REST_URL
"""
@@ -33,9 +33,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
from avalon.fusion.lib import get_frame_path
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
"http://localhost:8082")
- assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+ assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
# Collect all saver instances in context that are to be rendered
saver_instances = []
@@ -139,7 +139,7 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
- url = "{}/api/jobs".format(AVALON_DEADLINE)
+ url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
diff --git a/pype/plugins/global/load/open_djv.py b/pype/plugins/global/load/open_djv.py
new file mode 100644
index 0000000000..bd49d86d5f
--- /dev/null
+++ b/pype/plugins/global/load/open_djv.py
@@ -0,0 +1,114 @@
+import os
+import subprocess
+import json
+from pype import lib as pypelib
+from avalon import api
+
+
+def get_config_data():
+ path_items = [pypelib.get_presets_path(), 'djv_view', 'config.json']
+ filepath = os.path.sep.join(path_items)
+ data = dict()
+ with open(filepath) as data_file:
+ data = json.load(data_file)
+ return data
+
+
+def get_families():
+ families = []
+ paths = get_config_data().get('djv_paths', [])
+ for path in paths:
+ if os.path.exists(path):
+ families.append("*")
+ break
+ return families
+
+
+def get_representation():
+ return get_config_data().get('file_ext', [])
+
+
+class OpenInDJV(api.Loader):
+ """Open Image Sequence with system default"""
+
+ config_data = get_config_data()
+ families = get_families()
+ representations = get_representation()
+
+ label = "Open in DJV"
+ order = -10
+ icon = "play-circle"
+ color = "orange"
+
+ def load(self, context, name, namespace, data):
+ self.djv_path = None
+ paths = get_config_data().get('djv_paths', [])
+ for path in paths:
+ if os.path.exists(path):
+ self.djv_path = path
+ break
+ directory = os.path.dirname(self.fname)
+ from avalon.vendor import clique
+
+ pattern = clique.PATTERNS["frames"]
+ files = os.listdir(directory)
+ collections, remainder = clique.assemble(
+ files,
+ patterns=[pattern],
+ minimum_items=1
+ )
+
+ if not remainder:
+ seqeunce = collections[0]
+ first_image = list(seqeunce)[0]
+ # start = min(collections)
+ # end = max(collections)
+ #
+ # range = (padding % start) + '-' + (padding % end)
+ # filename = re.sub('%[0-9]*d', range, filename)
+ else:
+ first_image = self.fname
+ filepath = os.path.normpath(os.path.join(directory, first_image))
+
+ self.log.info("Opening : {}".format(filepath))
+
+ fps = context.get('project', {}).get('data', {}).get('fps', 24)
+
+ cmd = []
+ # DJV path
+ cmd.append(os.path.normpath(self.djv_path))
+ # DJV Options Start ##############################################
+ '''layer name'''
+ # cmd.append('-file_layer (value)')
+ ''' Proxy scale: 1/2, 1/4, 1/8'''
+ # cmd.append('-file_proxy 1/2')
+ ''' Cache: True, False.'''
+ cmd.append('-file_cache True')
+ ''' Start in full screen '''
+ # cmd.append('-window_fullscreen')
+ ''' Toolbar controls: False, True.'''
+ # cmd.append("-window_toolbar False")
+ ''' Window controls: False, True.'''
+ # cmd.append("-window_playbar False")
+ ''' Grid overlay: None, 1x1, 10x10, 100x100.'''
+ # cmd.append("-view_grid None")
+ ''' Heads up display: True, False.'''
+ # cmd.append("-view_hud True")
+ ''' Playback: Stop, Forward, Reverse.'''
+ cmd.append("-playback Forward")
+ ''' Frame.'''
+ # cmd.append("-playback_frame (value)")
+ cmd.append("-playback_speed " + str(fps))
+ ''' Timer: Sleep, Timeout. Value: Sleep.'''
+ # cmd.append("-playback_timer (value)")
+ ''' Timer resolution (seconds): 0.001.'''
+ # cmd.append("-playback_timer_resolution (value)")
+ ''' Time units: Timecode, Frames.'''
+ cmd.append("-time_units Frames")
+ # DJV Options End ################################################
+
+ # PATH TO COMPONENT
+ cmd.append(os.path.normpath(filepath))
+
+ # Run DJV with these commands
+ subprocess.Popen(' '.join(cmd))
diff --git a/pype/plugins/global/publish/collect_assumed_destination.py b/pype/plugins/global/publish/collect_assumed_destination.py
index d5d3d9a846..7de358b422 100644
--- a/pype/plugins/global/publish/collect_assumed_destination.py
+++ b/pype/plugins/global/publish/collect_assumed_destination.py
@@ -1,5 +1,5 @@
-import pyblish.api
import os
+import pyblish.api
from avalon import io, api
@@ -8,61 +8,11 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Collect Assumed Destination"
- order = pyblish.api.CollectorOrder + 0.499
+ order = pyblish.api.CollectorOrder + 0.498
exclude_families = ["clip"]
def process(self, instance):
- if [ef for ef in self.exclude_families
- if instance.data["family"] in ef]:
- return
-
- self.create_destination_template(instance)
-
- template_data = instance.data["assumedTemplateData"]
- # template = instance.data["template"]
-
- anatomy = instance.context.data['anatomy']
- # template = anatomy.publish.path
- anatomy_filled = anatomy.format(template_data)
- mock_template = anatomy_filled.publish.path
-
- # For now assume resources end up in a "resources" folder in the
- # published folder
- mock_destination = os.path.join(os.path.dirname(mock_template),
- "resources")
-
- # Clean the path
- mock_destination = os.path.abspath(os.path.normpath(mock_destination))
-
- # Define resource destination and transfers
- resources = instance.data.get("resources", list())
- transfers = instance.data.get("transfers", list())
- for resource in resources:
-
- # Add destination to the resource
- source_filename = os.path.basename(resource["source"])
- destination = os.path.join(mock_destination, source_filename)
-
- # Force forward slashes to fix issue with software unable
- # to work correctly with backslashes in specific scenarios
- # (e.g. escape characters in PLN-151 V-Ray UDIM)
- destination = destination.replace("\\", "/")
-
- resource['destination'] = destination
-
- # Collect transfers for the individual files of the resource
- # e.g. all individual files of a cache or UDIM textures.
- files = resource['files']
- for fsrc in files:
- fname = os.path.basename(fsrc)
- fdest = os.path.join(mock_destination, fname)
- transfers.append([fsrc, fdest])
-
- instance.data["resources"] = resources
- instance.data["transfers"] = transfers
-
- def create_destination_template(self, instance):
- """Create a filepath based on the current data available
+ """Create a destination filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
@@ -73,6 +23,9 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
Returns:
file path (str)
"""
+ if [ef for ef in self.exclude_families
+ if instance.data["family"] in ef]:
+ return
# get all the stuff from the database
subset_name = instance.data["subset"]
@@ -84,7 +37,7 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
projection={"config": True, "data": True})
template = project["config"]["template"]["publish"]
- # anatomy = instance.context.data['anatomy']
+ anatomy = instance.context.data['anatomy']
asset = io.find_one({"type": "asset",
"name": asset_name,
@@ -126,5 +79,10 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
"hierarchy": hierarchy,
"representation": "TEMP"}
- instance.data["assumedTemplateData"] = template_data
instance.data["template"] = template
+ instance.data["assumedTemplateData"] = template_data
+
+ # We take the parent folder of representation 'filepath'
+ instance.data["assumedDestination"] = os.path.dirname(
+ (anatomy.format(template_data)).publish.path
+ )
diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 9ea0509783..ce4c95d465 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -147,11 +147,13 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
"found sequence")
raise RuntimeError("Invalid sequence")
+ fps = data.get("fps", 25)
+
# Get family from the data
families = data.get("families", ["render"])
assert isinstance(families, (list, tuple)), "Must be iterable"
assert families, "Must have at least a single family"
-
+ families.append("ftrack")
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
@@ -180,6 +182,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
"files": [list(collection)],
"startFrame": start,
"endFrame": end,
+ "fps": fps,
"source": data.get('source', '')
})
instance.append(collection)
diff --git a/pype/plugins/global/publish/collect_output_repre_config.py b/pype/plugins/global/publish/collect_output_repre_config.py
new file mode 100644
index 0000000000..8c63cfcc11
--- /dev/null
+++ b/pype/plugins/global/publish/collect_output_repre_config.py
@@ -0,0 +1,24 @@
+import os
+import json
+import pyblish.api
+from pype import lib as pypelib
+
+
+class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
+ """Collect the output representation config into context"""
+
+ order = pyblish.api.CollectorOrder
+ label = "Collect Config for representation"
+ hosts = ["shell"]
+
+ def process(self, context):
+ config_items = [
+ pypelib.get_presets_path(),
+ "ftrack",
+ "output_representation.json"
+ ]
+ config_file = os.path.sep.join(config_items)
+ with open(config_file) as data_file:
+ config_data = json.load(data_file)
+
+ context.data['output_repre_config'] = config_data
diff --git a/pype/plugins/global/publish/extract_jpeg.py b/pype/plugins/global/publish/extract_jpeg.py
new file mode 100644
index 0000000000..7720c9d56d
--- /dev/null
+++ b/pype/plugins/global/publish/extract_jpeg.py
@@ -0,0 +1,67 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractJpegEXR(pyblish.api.InstancePlugin):
+ """Extract a single JPEG frame from an EXR image sequence.
+
+ Uses ffmpeg to convert the first frame of the collected image
+ sequence in the staging directory into a .jpg file.
+
+ The ffmpeg input arguments are taken from the project's output
+ representation config ('output_repre_config' in context data),
+ falling back to the '__default__' profile when not defined.
+ """
+
+ label = "Extract Jpeg EXR"
+ hosts = ["shell"]
+ order = pyblish.api.ExtractorOrder
+ families = ["imagesequence", "render", "write", "source"]
+
+
+ def process(self, instance):
+ start = instance.data.get("startFrame")
+ stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+
+ collected_frames = os.listdir(stagingdir)
+ collections, remainder = clique.assemble(collected_frames)
+
+ input_file = (
+ collections[0].format('{head}{padding}{tail}') % start
+ )
+ full_input_path = os.path.join(stagingdir, input_file)
+ self.log.info("input {}".format(full_input_path))
+
+ filename = collections[0].format('{head}')
+ if not filename.endswith('.'):
+ filename += "."
+ jpegFile = filename + "jpg"
+ full_output_path = os.path.join(stagingdir, jpegFile)
+
+ self.log.info("output {}".format(full_output_path))
+
+ config_data = instance.context.data['output_repre_config']
+
+ proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+ profile = config_data.get(proj_name, config_data['__default__'])
+
+ jpeg_items = []
+ jpeg_items.append("ffmpeg")
+ # override file if already exists
+ jpeg_items.append("-y")
+ # use same input args like with mov
+ jpeg_items.extend(profile.get('input', []))
+ # input file
+ jpeg_items.append("-i {}".format(full_input_path))
+ # output file
+ jpeg_items.append(full_output_path)
+
+ subprocess_jpeg = " ".join(jpeg_items)
+ sub_proc = subprocess.Popen(subprocess_jpeg)
+ sub_proc.wait()
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+ instance.data["files"].append(jpegFile)
diff --git a/pype/plugins/global/publish/extract_quicktime.py b/pype/plugins/global/publish/extract_quicktime.py
new file mode 100644
index 0000000000..621078e3c0
--- /dev/null
+++ b/pype/plugins/global/publish/extract_quicktime.py
@@ -0,0 +1,75 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
+ """Extract a Quicktime (.mov) from an EXR image sequence.
+
+ Uses ffmpeg to encode the collected image sequence in the
+ staging directory into a single .mov file.
+
+ Input and output ffmpeg arguments come from the project's output
+ representation config ('output_repre_config' in context data),
+ falling back to the '__default__' profile when not defined.
+ """
+
+ label = "Extract Quicktime EXR"
+ order = pyblish.api.ExtractorOrder
+ families = ["imagesequence", "render", "write", "source"]
+ hosts = ["shell"]
+
+ def process(self, instance):
+ fps = instance.data.get("fps")
+ start = instance.data.get("startFrame")
+ stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+
+ collected_frames = os.listdir(stagingdir)
+ collections, remainder = clique.assemble(collected_frames)
+
+ full_input_path = os.path.join(
+ stagingdir, collections[0].format('{head}{padding}{tail}')
+ )
+ self.log.info("input {}".format(full_input_path))
+
+ filename = collections[0].format('{head}')
+ if not filename.endswith('.'):
+ filename += "."
+ movFile = filename + "mov"
+ full_output_path = os.path.join(stagingdir, movFile)
+
+ self.log.info("output {}".format(full_output_path))
+
+ config_data = instance.context.data['output_repre_config']
+
+ proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+ profile = config_data.get(proj_name, config_data['__default__'])
+
+ input_args = []
+ # overrides output file
+ input_args.append("-y")
+ # preset's input data
+ input_args.extend(profile.get('input', []))
+ # necessary input data
+ input_args.append("-i {}".format(full_input_path))
+ input_args.append("-framerate {}".format(fps))
+ input_args.append("-start_number {}".format(start))
+
+ output_args = []
+ # preset's output data
+ output_args.extend(profile.get('output', []))
+ # output filename
+ output_args.append(full_output_path)
+ mov_args = [
+ "ffmpeg",
+ " ".join(input_args),
+ " ".join(output_args)
+ ]
+ subprocess_mov = " ".join(mov_args)
+ sub_proc = subprocess.Popen(subprocess_mov)
+ sub_proc.wait()
+
+ if "files" not in instance.data:
+ instance.data["files"] = list()
+ instance.data["files"].append(movFile)
diff --git a/pype/plugins/global/publish/integrate.py b/pype/plugins/global/publish/integrate.py
index cd20ba0837..00096a95ee 100644
--- a/pype/plugins/global/publish/integrate.py
+++ b/pype/plugins/global/publish/integrate.py
@@ -5,6 +5,7 @@ import shutil
import errno
import pyblish.api
from avalon import api, io
+from avalon.vendor import filelink
log = logging.getLogger(__name__)
@@ -31,12 +32,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"pointcache",
"vdbcache",
"setdress",
+ "assembly",
+ "layout",
"rig",
"vrayproxy",
"yetiRig",
"yeticache",
"nukescript",
"review",
+ "workfile",
"scene",
"ass"]
exclude_families = ["clip"]
@@ -88,6 +92,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.debug("Establishing staging directory @ %s" % stagingdir)
+ # Ensure at least one file is set up for transfer in staging dir.
+ files = instance.data.get("files", [])
+ assert files, "Instance has no files to transfer"
+ assert isinstance(files, (list, tuple)), (
+ "Instance 'files' must be a list, got: {0}".format(files)
+ )
+
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
@@ -167,6 +178,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
+ if 'transfers' not in instance.data:
+ instance.data['transfers'] = []
for files in instance.data["files"]:
@@ -268,12 +281,22 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
instance: the instance to integrate
"""
- transfers = instance.data["transfers"]
+ transfers = instance.data.get("transfers", list())
for src, dest in transfers:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
+ # Produce hardlinked copies
+ # Note: hardlink can only be produced between two files on the same
+ # server/disk and editing one of the two will edit both files at once.
+ # As such it is recommended to only make hardlinks between static files
+ # to ensure publishes remain safe and non-edited.
+ hardlinks = instance.data.get("hardlinks", list())
+ for src, dest in hardlinks:
+ self.log.info("Hardlinking file .. {} -> {}".format(src, dest))
+ self.hardlink_file(src, dest)
+
def copy_file(self, src, dst):
""" Copy given source to destination
@@ -296,6 +319,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
shutil.copy(src, dst)
+ def hardlink_file(self, src, dst):
+
+ dirname = os.path.dirname(dst)
+ try:
+ os.makedirs(dirname)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ pass
+ else:
+ self.log.critical("An unexpected error occurred.")
+ raise
+
+ filelink.create(src, dst, filelink.HARDLINK)
+
def get_subset(self, asset, instance):
subset = io.find_one({"type": "subset",
@@ -359,7 +396,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
families.append(instance_family)
families += current_families
- self.log.debug("Registered roor: {}".format(api.registered_root()))
+ self.log.debug("Registered root: {}".format(api.registered_root()))
# create relative source path for DB
try:
source = instance.data['source']
@@ -379,7 +416,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"fps": context.data.get("fps")}
# Include optional data if present in
- optionals = ["startFrame", "endFrame", "step", "handles"]
+ optionals = [
+ "startFrame", "endFrame", "step", "handles", "sourceHashes"
+ ]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
diff --git a/pype/plugins/global/publish/integrate_rendered_frames.py b/pype/plugins/global/publish/integrate_rendered_frames.py
index ae11d33348..8e7e2a59c4 100644
--- a/pype/plugins/global/publish/integrate_rendered_frames.py
+++ b/pype/plugins/global/publish/integrate_rendered_frames.py
@@ -168,6 +168,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
representations = []
destination_list = []
+ if 'transfers' not in instance.data:
+ instance.data['transfers'] = []
+
for files in instance.data["files"]:
# Collection
# _______
@@ -240,7 +243,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
- template_data["frame"] = "#####"
+ template_data["frame"] = "#" * anatomy.render.padding
anatomy_filled = anatomy.format(template_data)
path_to_save = anatomy_filled.render.path
template = anatomy.render.fullpath
diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index 8895e3626a..c7352ba082 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -98,7 +98,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
plug-in.
Renders are submitted to a Deadline Web Service as
- supplied via the environment variable AVALON_DEADLINE
+ supplied via the environment variable DEADLINE_REST_URL
Options in instance.data:
- deadlineSubmission (dict, Required): The returned .json
@@ -126,16 +126,16 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
hosts = ["fusion", "maya", "nuke"]
families = [
- "render.deadline",
+ "render.farm",
"renderlayer",
"imagesequence"
]
def process(self, instance):
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
"http://localhost:8082")
- assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+ assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
# try:
# deadline_url = os.environ["DEADLINE_REST_URL"]
@@ -192,6 +192,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
"regex": regex,
"startFrame": start,
"endFrame": end,
+ "fps": context.data.get("fps", None),
"families": ["render"],
"source": source,
"user": context.data["user"],
@@ -326,7 +327,7 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
- url = "{}/api/jobs".format(AVALON_DEADLINE)
+ url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
diff --git a/pype/plugins/launcher/actions/AssetCreator.py b/pype/plugins/launcher/actions/AssetCreator.py
new file mode 100644
index 0000000000..ff06895ae0
--- /dev/null
+++ b/pype/plugins/launcher/actions/AssetCreator.py
@@ -0,0 +1,36 @@
+import os
+import sys
+import acre
+
+from avalon import api, lib
+from pype.tools import assetcreator
+
+from pype.api import Logger
+
+log = Logger.getLogger(__name__, "asset_creator")
+
+
+class AssetCreator(api.Action):
+
+ name = "asset_creator"
+ label = "Asset Creator"
+ icon = "plus-square"
+ order = 250
+
+ def is_compatible(self, session):
+ """Return whether the action is compatible with the session"""
+ if "AVALON_PROJECT" in session:
+ return True
+ return False
+
+ def process(self, session, **kwargs):
+ asset = ''
+ if 'AVALON_ASSET' in session:
+ asset = session['AVALON_ASSET']
+ return lib.launch(
+ executable="python",
+ args=[
+ "-u", "-m", "pype.tools.assetcreator",
+ session['AVALON_PROJECT'], asset
+ ]
+ )
diff --git a/pype/plugins/launcher/actions/ClockifyStart.py b/pype/plugins/launcher/actions/ClockifyStart.py
new file mode 100644
index 0000000000..d0d1bb48f3
--- /dev/null
+++ b/pype/plugins/launcher/actions/ClockifyStart.py
@@ -0,0 +1,42 @@
+from avalon import api, io
+from pype.clockify import ClockifyAPI
+from pype.api import Logger
+log = Logger.getLogger(__name__, "clockify_start")
+
+
+class ClockifyStart(api.Action):
+
+ name = "clockify_start_timer"
+ label = "Clockify - Start Timer"
+ icon = "clockify_icon"
+ order = 500
+ clockapi = ClockifyAPI()
+
+ def is_compatible(self, session):
+ """Return whether the action is compatible with the session"""
+ if "AVALON_TASK" in session:
+ return True
+ return False
+
+ def process(self, session, **kwargs):
+ project_name = session['AVALON_PROJECT']
+ asset_name = session['AVALON_ASSET']
+ task_name = session['AVALON_TASK']
+
+ description = asset_name
+ asset = io.find_one({
+ 'type': 'asset',
+ 'name': asset_name
+ })
+ if asset is not None:
+ desc_items = asset.get('data', {}).get('parents', [])
+ desc_items.append(asset_name)
+ desc_items.append(task_name)
+ description = '/'.join(desc_items)
+
+ project_id = self.clockapi.get_project_id(project_name)
+ tag_ids = []
+ tag_ids.append(self.clockapi.get_tag_id(task_name))
+ self.clockapi.start_time_entry(
+ description, project_id, tag_ids=tag_ids
+ )
diff --git a/pype/plugins/launcher/actions/ClockifySync.py b/pype/plugins/launcher/actions/ClockifySync.py
new file mode 100644
index 0000000000..d8c69bc768
--- /dev/null
+++ b/pype/plugins/launcher/actions/ClockifySync.py
@@ -0,0 +1,57 @@
+from avalon import api, io
+from pype.clockify import ClockifyAPI
+from pype.api import Logger
+log = Logger.getLogger(__name__, "clockify_sync")
+
+
+class ClockifySync(api.Action):
+
+ name = "sync_to_clockify"
+ label = "Sync to Clockify"
+ icon = "clockify_white_icon"
+ order = 500
+ clockapi = ClockifyAPI()
+ have_permissions = clockapi.validate_workspace_perm()
+
+ def is_compatible(self, session):
+ """Return whether the action is compatible with the session"""
+ return self.have_permissions
+
+ def process(self, session, **kwargs):
+ project_name = session.get('AVALON_PROJECT', None)
+
+ projects_to_sync = []
+ if project_name.strip() == '' or project_name is None:
+ for project in io.projects():
+ projects_to_sync.append(project)
+ else:
+ project = io.find_one({'type': 'project'})
+ projects_to_sync.append(project)
+
+ projects_info = {}
+ for project in projects_to_sync:
+ task_types = [task['name'] for task in project['config']['tasks']]
+ projects_info[project['name']] = task_types
+
+ clockify_projects = self.clockapi.get_projects()
+ for project_name, task_types in projects_info.items():
+ if project_name not in clockify_projects:
+ response = self.clockapi.add_project(project_name)
+ if 'id' not in response:
+ self.log.error('Project {} can\'t be created'.format(
+ project_name
+ ))
+ continue
+ project_id = response['id']
+ else:
+ project_id = clockify_projects[project_name]
+
+ clockify_workspace_tags = self.clockapi.get_tags()
+ for task_type in task_types:
+ if task_type not in clockify_workspace_tags:
+ response = self.clockapi.add_tag(task_type)
+ if 'id' not in response:
+ self.log.error('Task {} can\'t be created'.format(
+ task_type
+ ))
+ continue
diff --git a/pype/plugins/maya/create/create_animation.py b/pype/plugins/maya/create/create_animation.py
index 5eef2ac225..eaa54a764d 100644
--- a/pype/plugins/maya/create/create_animation.py
+++ b/pype/plugins/maya/create/create_animation.py
@@ -9,6 +9,7 @@ class CreateAnimation(avalon.maya.Creator):
label = "Animation"
family = "animation"
icon = "male"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreateAnimation, self).__init__(*args, **kwargs)
diff --git a/pype/plugins/maya/create/create_ass.py b/pype/plugins/maya/create/create_ass.py
index 3423648c40..84b42e9b20 100644
--- a/pype/plugins/maya/create/create_ass.py
+++ b/pype/plugins/maya/create/create_ass.py
@@ -12,6 +12,7 @@ class CreateAss(avalon.maya.Creator):
label = "Ass StandIn"
family = "ass"
icon = "cube"
+ defaults = ['Main']
def process(self):
instance = super(CreateAss, self).process()
diff --git a/pype/plugins/maya/create/create_assembly.py b/pype/plugins/maya/create/create_assembly.py
new file mode 100644
index 0000000000..2a00d4a29a
--- /dev/null
+++ b/pype/plugins/maya/create/create_assembly.py
@@ -0,0 +1,11 @@
+import avalon.maya
+
+
+class CreateAssembly(avalon.maya.Creator):
+ """A grouped package of loaded content"""
+
+ name = "assembly"
+ label = "Assembly"
+ family = "assembly"
+ icon = "boxes"
+ defaults = ['Main']
diff --git a/pype/plugins/maya/create/create_camera.py b/pype/plugins/maya/create/create_camera.py
index 16293deb57..c46416d475 100644
--- a/pype/plugins/maya/create/create_camera.py
+++ b/pype/plugins/maya/create/create_camera.py
@@ -9,6 +9,7 @@ class CreateCamera(avalon.maya.Creator):
label = "Camera"
family = "camera"
icon = "video-camera"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreateCamera, self).__init__(*args, **kwargs)
diff --git a/pype/plugins/maya/create/create_layout.py b/pype/plugins/maya/create/create_layout.py
new file mode 100644
index 0000000000..3f6dd5d769
--- /dev/null
+++ b/pype/plugins/maya/create/create_layout.py
@@ -0,0 +1,11 @@
+import avalon.maya
+
+
+class CreateLayout(avalon.maya.Creator):
+ """A grouped package of loaded content"""
+
+ name = "layoutMain"
+ label = "Layout"
+ family = "layout"
+ icon = "boxes"
+ defaults = ["Main"]
diff --git a/pype/plugins/maya/create/create_look.py b/pype/plugins/maya/create/create_look.py
index 23e4f034b2..299fbafe02 100644
--- a/pype/plugins/maya/create/create_look.py
+++ b/pype/plugins/maya/create/create_look.py
@@ -9,8 +9,12 @@ class CreateLook(avalon.maya.Creator):
label = "Look"
family = "look"
icon = "paint-brush"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreateLook, self).__init__(*args, **kwargs)
self.data["renderlayer"] = lib.get_current_renderlayer()
+
+ # Whether to automatically convert the textures to .tx upon publish.
+ self.data["maketx"] = True
diff --git a/pype/plugins/maya/create/create_mayaascii.py b/pype/plugins/maya/create/create_mayaascii.py
index bf55c3d578..e7cc40dc24 100644
--- a/pype/plugins/maya/create/create_mayaascii.py
+++ b/pype/plugins/maya/create/create_mayaascii.py
@@ -8,3 +8,4 @@ class CreateMayaAscii(avalon.maya.Creator):
label = "Maya Ascii"
family = "mayaAscii"
icon = "file-archive-o"
+ defaults = ['Main']
diff --git a/pype/plugins/maya/create/create_model.py b/pype/plugins/maya/create/create_model.py
index 449a5642be..a992d84585 100644
--- a/pype/plugins/maya/create/create_model.py
+++ b/pype/plugins/maya/create/create_model.py
@@ -4,10 +4,11 @@ import avalon.maya
class CreateModel(avalon.maya.Creator):
"""Polygonal static geometry"""
- name = "modelDefault"
+ name = "modelMain"
label = "Model"
family = "model"
icon = "cube"
+ defaults = ["Main", "Proxy"]
def __init__(self, *args, **kwargs):
super(CreateModel, self).__init__(*args, **kwargs)
diff --git a/pype/plugins/maya/create/create_pointcache.py b/pype/plugins/maya/create/create_pointcache.py
index e5c82f7e10..1d66bb63fc 100644
--- a/pype/plugins/maya/create/create_pointcache.py
+++ b/pype/plugins/maya/create/create_pointcache.py
@@ -9,6 +9,7 @@ class CreatePointCache(avalon.maya.Creator):
label = "Point Cache"
family = "pointcache"
icon = "gears"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreatePointCache, self).__init__(*args, **kwargs)
diff --git a/pype/plugins/maya/create/create_renderglobals.py b/pype/plugins/maya/create/create_renderglobals.py
index 2ecc6cd0cd..a77180160e 100644
--- a/pype/plugins/maya/create/create_renderglobals.py
+++ b/pype/plugins/maya/create/create_renderglobals.py
@@ -4,14 +4,15 @@ import pype.maya.lib as lib
from avalon.vendor import requests
import avalon.maya
-# from avalon import api
import os
+
class CreateRenderGlobals(avalon.maya.Creator):
label = "Render Globals"
family = "renderglobals"
icon = "gears"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreateRenderGlobals, self).__init__(*args, **kwargs)
@@ -19,19 +20,23 @@ class CreateRenderGlobals(avalon.maya.Creator):
# We won't be publishing this one
self.data["id"] = "avalon.renderglobals"
- # Get available Deadline pools
- try:
- AVALON_DEADLINE = os.environ["AVALON_DEADLINE"]
- except KeyError:
- self.log.error("Deadline REST API url not found.")
+ # get pools
+ pools = []
- argument = "{}/api/pools?NamesOnly=true".format(AVALON_DEADLINE)
- response = requests.get(argument)
- if not response.ok:
- self.log.warning("No pools retrieved")
- pools = []
+ deadline_url = os.environ.get('DEADLINE_REST_URL', None)
+ if deadline_url is None:
+ self.log.warning("Deadline REST API url not found.")
else:
- pools = response.json()
+ argument = "{}/api/pools?NamesOnly=true".format(deadline_url)
+ response = requests.get(argument)
+ if not response.ok:
+ self.log.warning("No pools retrieved")
+ else:
+ pools = response.json()
+ self.data["primaryPool"] = pools
+ # We add a string "-" to allow the user to not
+ # set any secondary pools
+ self.data["secondaryPool"] = ["-"] + pools
# We don't need subset or asset attributes
# self.data.pop("subset", None)
@@ -47,9 +52,6 @@ class CreateRenderGlobals(avalon.maya.Creator):
self.data["whitelist"] = False
self.data["machineList"] = ""
self.data["useMayaBatch"] = True
- self.data["primaryPool"] = pools
- # We add a string "-" to allow the user to not set any secondary pools
- self.data["secondaryPool"] = ["-"] + pools
self.options = {"useSelection": False} # Force no content
diff --git a/pype/plugins/maya/create/create_review.py b/pype/plugins/maya/create/create_review.py
index bf2fe3088f..fcd07b7f61 100644
--- a/pype/plugins/maya/create/create_review.py
+++ b/pype/plugins/maya/create/create_review.py
@@ -10,6 +10,7 @@ class CreateReview(avalon.maya.Creator):
label = "Review"
family = "review"
icon = "video-camera"
+ defaults = ['Main']
def __init__(self, *args, **kwargs):
super(CreateReview, self).__init__(*args, **kwargs)
diff --git a/pype/plugins/maya/create/create_rig.py b/pype/plugins/maya/create/create_rig.py
index 2c8f3d78b9..ff5c52ed3c 100644
--- a/pype/plugins/maya/create/create_rig.py
+++ b/pype/plugins/maya/create/create_rig.py
@@ -11,6 +11,7 @@ class CreateRig(avalon.maya.Creator):
label = "Rig"
family = "rig"
icon = "wheelchair"
+ defaults = ['Main']
def process(self):
diff --git a/pype/plugins/maya/create/create_setdress.py b/pype/plugins/maya/create/create_setdress.py
index 79f08ca04c..079ccbd029 100644
--- a/pype/plugins/maya/create/create_setdress.py
+++ b/pype/plugins/maya/create/create_setdress.py
@@ -4,7 +4,8 @@ import avalon.maya
class CreateSetDress(avalon.maya.Creator):
"""A grouped package of loaded content"""
- name = "setdress"
+ name = "setdressMain"
label = "Set Dress"
family = "setdress"
- icon = "cubes"
\ No newline at end of file
+ icon = "boxes"
+ defaults = ["Main", "Anim"]
diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py
index 9e08702521..9fd4aa2108 100644
--- a/pype/plugins/maya/load/load_alembic.py
+++ b/pype/plugins/maya/load/load_alembic.py
@@ -1,4 +1,6 @@
import pype.maya.plugin
+import os
+import json
class AbcLoader(pype.maya.plugin.ReferenceLoader):
@@ -16,6 +18,12 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "animation"
+
+ groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
@@ -25,6 +33,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
reference=True,
returnNewNodes=True)
+ cmds.makeIdentity(groupName, apply=False, rotate=True,
+ translate=True, scale=True)
+
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
+
self[:] = nodes
return nodes
diff --git a/pype/plugins/maya/load/load_ass.py b/pype/plugins/maya/load/load_ass.py
index b27cd20b5b..c268ce70c5 100644
--- a/pype/plugins/maya/load/load_ass.py
+++ b/pype/plugins/maya/load/load_ass.py
@@ -2,6 +2,7 @@ from avalon import api
import pype.maya.plugin
import os
import pymel.core as pm
+import json
class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
@@ -21,6 +22,11 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
from avalon import maya
import pymel.core as pm
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "ass"
+
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
@@ -34,7 +40,8 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName=groupName)
- cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True)
+ cmds.makeIdentity(groupName, apply=False, rotate=True,
+ translate=True, scale=True)
# Set attributes
proxyShape = pm.ls(nodes, type="mesh")[0]
@@ -43,6 +50,19 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
proxyShape.dso.set(path)
proxyShape.aiOverrideShaders.set(0)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
self[:] = nodes
@@ -132,7 +152,6 @@ class AssStandinLoader(api.Loader):
import mtoa.ui.arnoldmenu
import pymel.core as pm
-
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
@@ -146,6 +165,20 @@ class AssStandinLoader(api.Loader):
label = "{}:{}".format(namespace, name)
root = pm.group(name=label, empty=True)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get('ass')
+ if c is not None:
+ cmds.setAttr(root + ".useOutlinerColor", 1)
+ cmds.setAttr(root + ".outlinerColor",
+ c[0], c[1], c[2])
+
# Create transform with shape
transform_name = label + "_ASS"
# transform = pm.createNode("transform", name=transform_name,
@@ -160,10 +193,6 @@ class AssStandinLoader(api.Loader):
# Set the standin filepath
standinShape.dso.set(self.fname)
-
- # Lock parenting of the transform and standin
- cmds.lockNode([root, standin], lock=True)
-
nodes = [root, standin]
self[:] = nodes
diff --git a/pype/plugins/maya/load/load_setdress.py b/pype/plugins/maya/load/load_assembly.py
similarity index 95%
rename from pype/plugins/maya/load/load_setdress.py
rename to pype/plugins/maya/load/load_assembly.py
index 40785613be..4f72ff9e13 100644
--- a/pype/plugins/maya/load/load_setdress.py
+++ b/pype/plugins/maya/load/load_assembly.py
@@ -1,9 +1,9 @@
from avalon import api
-class SetDressLoader(api.Loader):
+class AssemblyLoader(api.Loader):
- families = ["setdress"]
+ families = ["assembly"]
representations = ["json"]
label = "Load Set Dress"
@@ -77,4 +77,4 @@ class SetDressLoader(api.Loader):
members = cmds.sets(container['objectName'], query=True) or []
cmds.delete([container['objectName']] + members)
- # TODO: Ensure namespace is gone
\ No newline at end of file
+ # TODO: Ensure namespace is gone
diff --git a/pype/plugins/maya/load/load_camera.py b/pype/plugins/maya/load/load_camera.py
index eb75c3a63d..989e80e979 100644
--- a/pype/plugins/maya/load/load_camera.py
+++ b/pype/plugins/maya/load/load_camera.py
@@ -1,4 +1,6 @@
import pype.maya.plugin
+import os
+import json
class CameraLoader(pype.maya.plugin.ReferenceLoader):
@@ -16,7 +18,13 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
# Get family type from the context
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "camera"
+
cmds.loadPlugin("AbcImport.mll", quiet=True)
+ groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
@@ -27,6 +35,20 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):
cameras = cmds.ls(nodes, type="camera")
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
+
# Check the Maya version, lockTransform has been introduced since
# Maya 2016.5 Ext 2
version = int(cmds.about(version=True))
diff --git a/pype/plugins/maya/load/load_fbx.py b/pype/plugins/maya/load/load_fbx.py
index 2ee3e5fdbd..b580257334 100644
--- a/pype/plugins/maya/load/load_fbx.py
+++ b/pype/plugins/maya/load/load_fbx.py
@@ -1,4 +1,6 @@
import pype.maya.plugin
+import os
+import json
class FBXLoader(pype.maya.plugin.ReferenceLoader):
@@ -17,6 +19,11 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "fbx"
+
# Ensure FBX plug-in is loaded
cmds.loadPlugin("fbxmaya", quiet=True)
@@ -28,6 +35,21 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName="{}:{}".format(namespace, name))
+ groupName = "{}:{}".format(namespace, name)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
+
self[:] = nodes
return nodes
diff --git a/pype/plugins/maya/load/load_look.py b/pype/plugins/maya/load/load_look.py
index eede195a49..b1c88bcd18 100644
--- a/pype/plugins/maya/load/load_look.py
+++ b/pype/plugins/maya/load/load_look.py
@@ -1,4 +1,8 @@
import pype.maya.plugin
+from avalon import api, io
+import json
+import pype.maya.lib
+from collections import defaultdict
class LookLoader(pype.maya.plugin.ReferenceLoader):
@@ -14,7 +18,7 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, data):
"""
- Load and try to ssign Lookdev to nodes based on relationship data
+ Load and try to assign Lookdev to nodes based on relationship data
Args:
name:
namespace:
@@ -38,3 +42,99 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
def switch(self, container, representation):
self.update(container, representation)
+
+ def update(self, container, representation):
+
+ import os
+ from maya import cmds
+
+ node = container["objectName"]
+
+ path = api.get_representation_path(representation)
+
+ # Get reference node from container members
+ members = cmds.sets(node, query=True, nodesOnly=True)
+ reference_node = self._get_reference_node(members)
+
+ file_type = {
+ "ma": "mayaAscii",
+ "mb": "mayaBinary",
+ "abc": "Alembic"
+ }.get(representation["name"])
+
+ assert file_type, "Unsupported representation: %s" % representation
+
+ assert os.path.exists(path), "%s does not exist." % path
+
+ try:
+ content = cmds.file(path,
+ loadReference=reference_node,
+ type=file_type,
+ returnNewNodes=True)
+ except RuntimeError as exc:
+ # When changing a reference to a file that has load errors the
+ # command will raise an error even if the file is still loaded
+ # correctly (e.g. when raising errors on Arnold attributes)
+ # When the file is loaded and has content, we consider it's fine.
+ if not cmds.referenceQuery(reference_node, isLoaded=True):
+ raise
+
+ content = cmds.referenceQuery(reference_node,
+ nodes=True,
+ dagPath=True)
+ if not content:
+ raise
+
+ self.log.warning("Ignoring file read error:\n%s", exc)
+
+ # Fix PLN-40 for older containers created with Avalon that had the
+ # `.verticesOnlySet` set to True.
+ if cmds.getAttr("{}.verticesOnlySet".format(node)):
+ self.log.info("Setting %s.verticesOnlySet to False", node)
+ cmds.setAttr("{}.verticesOnlySet".format(node), False)
+
+ # Add new nodes of the reference to the container
+ cmds.sets(content, forceElement=node)
+
+ # Remove any placeHolderList attribute entries from the set that
+ # are remaining from nodes being removed from the referenced file.
+ members = cmds.sets(node, query=True)
+ invalid = [x for x in members if ".placeHolderList" in x]
+ if invalid:
+ cmds.sets(invalid, remove=node)
+
+ # Get container members
+ shader_nodes = cmds.ls(members, type='shadingEngine')
+
+ nodes_list = []
+ for shader in shader_nodes:
+ connections = cmds.listConnections(cmds.listHistory(shader, f=1),
+ type='mesh')
+ if connections:
+ for connection in connections:
+ nodes_list.extend(cmds.listRelatives(connection,
+ shapes=True))
+ nodes = set(nodes_list)
+
+ json_representation = io.find_one({"type": "representation",
+ "parent": representation['parent'],
+ "name": "json"})
+
+ # Load relationships
+ shader_relation = api.get_representation_path(json_representation)
+ with open(shader_relation, "r") as f:
+ relationships = json.load(f)
+
+ attributes = relationships.get("attributes", [])
+
+ # region compute lookup
+ nodes_by_id = defaultdict(list)
+ for n in nodes:
+ nodes_by_id[pype.maya.lib.get_id(n)].append(n)
+
+ pype.maya.lib.apply_attributes(attributes, nodes_by_id)
+
+ # Update metadata
+ cmds.setAttr("{}.representation".format(node),
+ str(representation["_id"]),
+ type="string")
diff --git a/pype/plugins/maya/load/load_mayaascii.py b/pype/plugins/maya/load/load_mayaascii.py
index 283ecbd3a2..549d1dff4c 100644
--- a/pype/plugins/maya/load/load_mayaascii.py
+++ b/pype/plugins/maya/load/load_mayaascii.py
@@ -1,10 +1,14 @@
import pype.maya.plugin
+import json
+import os
class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
"""Load the model"""
- families = ["mayaAscii"]
+ families = ["mayaAscii",
+ "setdress",
+ "layout"]
representations = ["ma"]
label = "Reference Maya Ascii"
@@ -17,6 +21,11 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "model"
+
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
@@ -26,6 +35,20 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
groupName="{}:{}".format(namespace, name))
self[:] = nodes
+ groupName = "{}:{}".format(namespace, name)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
return nodes
diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py
index 82bc8c1ce9..16f3556de7 100644
--- a/pype/plugins/maya/load/load_model.py
+++ b/pype/plugins/maya/load/load_model.py
@@ -1,5 +1,7 @@
from avalon import api
import pype.maya.plugin
+import json
+import os
class ModelLoader(pype.maya.plugin.ReferenceLoader):
@@ -7,6 +9,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
families = ["model"]
representations = ["ma"]
+ tool_names = ["loader"]
label = "Reference Model"
order = -10
@@ -18,13 +21,36 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "model"
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
with maya.maintained_selection():
+
+ groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
- groupName="{}:{}".format(namespace, name))
+ groupName=groupName)
+
+ cmds.makeIdentity(groupName, apply=False, rotate=True,
+ translate=True, scale=True)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
self[:] = nodes
@@ -63,6 +89,19 @@ class GpuCacheLoader(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get('model')
+ if c is not None:
+ cmds.setAttr(root + ".useOutlinerColor", 1)
+ cmds.setAttr(root + ".outlinerColor",
+ c[0], c[1], c[2])
# Create transform with shape
transform_name = label + "_GPU"
@@ -124,11 +163,13 @@ class GpuCacheLoader(api.Loader):
except RuntimeError:
pass
+
class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the studio.animation family"""
families = ["model"]
representations = ["abc"]
+ tool_names = ["loader"]
label = "Reference Model"
order = -10
@@ -139,15 +180,36 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
+ groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
- groupName="{}:{}".format(namespace, name),
+ groupName=groupName,
reference=True,
returnNewNodes=True)
+ namespace = cmds.referenceQuery(nodes[0], namespace=True)
+ groupName = "{}:{}".format(namespace, name)
+
+ cmds.makeIdentity(groupName, apply=False, rotate=True,
+ translate=True, scale=True)
+
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get('model')
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
+
self[:] = nodes
return nodes
diff --git a/pype/plugins/maya/load/load_rig.py b/pype/plugins/maya/load/load_rig.py
index aa40ca3cc2..1dcff45bb9 100644
--- a/pype/plugins/maya/load/load_rig.py
+++ b/pype/plugins/maya/load/load_rig.py
@@ -2,6 +2,8 @@ from maya import cmds
import pype.maya.plugin
from avalon import api, maya
+import os
+import json
class RigLoader(pype.maya.plugin.ReferenceLoader):
@@ -21,12 +23,35 @@ class RigLoader(pype.maya.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, data):
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "rig"
+
+ groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
- groupName="{}:{}".format(namespace, name))
+ groupName=groupName)
+
+ cmds.makeIdentity(groupName, apply=False, rotate=True,
+ translate=True, scale=True)
+
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
# Store for post-process
self[:] = nodes
diff --git a/pype/plugins/maya/load/load_vdb_to_redshift.py b/pype/plugins/maya/load/load_vdb_to_redshift.py
index 8ff8bc0326..169c3bf34a 100644
--- a/pype/plugins/maya/load/load_vdb_to_redshift.py
+++ b/pype/plugins/maya/load/load_vdb_to_redshift.py
@@ -1,4 +1,6 @@
from avalon import api
+import os
+import json
class LoadVDBtoRedShift(api.Loader):
@@ -17,6 +19,11 @@ class LoadVDBtoRedShift(api.Loader):
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "vdbcache"
+
# Check if the plugin for redshift is available on the pc
try:
cmds.loadPlugin("redshift4maya", quiet=True)
@@ -48,6 +55,19 @@ class LoadVDBtoRedShift(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(root + ".useOutlinerColor", 1)
+ cmds.setAttr(root + ".outlinerColor",
+ c[0], c[1], c[2])
# Create VR
volume_node = cmds.createNode("RedshiftVolumeShape",
diff --git a/pype/plugins/maya/load/load_vdb_to_vray.py b/pype/plugins/maya/load/load_vdb_to_vray.py
index ac20b0eb43..58d6d1b56e 100644
--- a/pype/plugins/maya/load/load_vdb_to_vray.py
+++ b/pype/plugins/maya/load/load_vdb_to_vray.py
@@ -1,4 +1,6 @@
from avalon import api
+import json
+import os
class LoadVDBtoVRay(api.Loader):
@@ -16,6 +18,11 @@ class LoadVDBtoVRay(api.Loader):
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "vdbcache"
+
# Check if viewport drawing engine is Open GL Core (compat)
render_engine = None
compatible = "OpenGLCoreProfileCompat"
@@ -40,6 +47,19 @@ class LoadVDBtoVRay(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(root + ".useOutlinerColor", 1)
+ cmds.setAttr(root + ".outlinerColor",
+ c[0], c[1], c[2])
# Create VR
grid_node = cmds.createNode("VRayVolumeGrid",
diff --git a/pype/plugins/maya/load/load_vrayproxy.py b/pype/plugins/maya/load/load_vrayproxy.py
index 9396e124ce..a3a114440a 100644
--- a/pype/plugins/maya/load/load_vrayproxy.py
+++ b/pype/plugins/maya/load/load_vrayproxy.py
@@ -1,6 +1,7 @@
from avalon.maya import lib
from avalon import api
-
+import json
+import os
import maya.cmds as cmds
@@ -20,6 +21,19 @@ class VRayProxyLoader(api.Loader):
from avalon.maya.pipeline import containerise
from pype.maya.lib import namespaced
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "vrayproxy"
+
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
asset_name = context['asset']["name"]
namespace = namespace or lib.unique_namespace(
asset_name + "_",
@@ -40,6 +54,12 @@ class VRayProxyLoader(api.Loader):
if not nodes:
return
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr("{0}_{1}.useOutlinerColor".format(name, "GRP"), 1)
+ cmds.setAttr("{0}_{1}.outlinerColor".format(name, "GRP"),
+ c[0], c[1], c[2])
+
return containerise(
name=name,
namespace=namespace,
@@ -101,7 +121,7 @@ class VRayProxyLoader(api.Loader):
# Create nodes
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
- vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
+ vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
name="{}_VRMM".format(name))
vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
empty=True,
diff --git a/pype/plugins/maya/load/load_yeti_cache.py b/pype/plugins/maya/load/load_yeti_cache.py
index 2160924047..b19bed1393 100644
--- a/pype/plugins/maya/load/load_yeti_cache.py
+++ b/pype/plugins/maya/load/load_yeti_cache.py
@@ -23,6 +23,11 @@ class YetiCacheLoader(api.Loader):
def load(self, context, name=None, namespace=None, data=None):
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "yeticache"
+
# Build namespace
asset = context["asset"]
if namespace is None:
@@ -49,6 +54,19 @@ class YetiCacheLoader(api.Loader):
group_name = "{}:{}".format(namespace, name)
group_node = cmds.group(nodes, name=group_name)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get(family)
+ if c is not None:
+ cmds.setAttr(group_name + ".useOutlinerColor", 1)
+ cmds.setAttr(group_name + ".outlinerColor",
+ c[0], c[1], c[2])
nodes.append(group_node)
diff --git a/pype/plugins/maya/load/load_yeti_rig.py b/pype/plugins/maya/load/load_yeti_rig.py
index 096b936b41..c821c6ca02 100644
--- a/pype/plugins/maya/load/load_yeti_rig.py
+++ b/pype/plugins/maya/load/load_yeti_rig.py
@@ -1,4 +1,6 @@
import pype.maya.plugin
+import os
+import json
class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
@@ -24,6 +26,20 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName="{}:{}".format(namespace, name))
+ groupName = "{}:{}".format(namespace, name)
+ preset_file = os.path.join(
+ os.environ.get('PYPE_STUDIO_TEMPLATES'),
+ 'presets', 'tools',
+ 'family_colors.json'
+ )
+ with open(preset_file, 'r') as cfile:
+ colors = json.load(cfile)
+
+ c = colors.get('yetiRig')
+ if c is not None:
+ cmds.setAttr(groupName + ".useOutlinerColor", 1)
+ cmds.setAttr(groupName + ".outlinerColor",
+ c[0], c[1], c[2])
self[:] = nodes
self.log.info("Yeti Rig Connection Manager will be available soon")
diff --git a/pype/plugins/maya/publish/collect_setdress.py b/pype/plugins/maya/publish/collect_assembly.py
similarity index 89%
rename from pype/plugins/maya/publish/collect_setdress.py
rename to pype/plugins/maya/publish/collect_assembly.py
index bb56163293..76274b1032 100644
--- a/pype/plugins/maya/publish/collect_setdress.py
+++ b/pype/plugins/maya/publish/collect_assembly.py
@@ -5,11 +5,11 @@ from maya import cmds, mel
from avalon import maya as avalon
from pype.maya import lib
-# TODO : Publish of setdress: -unique namespace for all assets, VALIDATOR!
+# TODO : Publish of assembly: -unique namespace for all assets, VALIDATOR!
-class CollectSetDress(pyblish.api.InstancePlugin):
- """Collect all relevant setdress items
+class CollectAssembly(pyblish.api.InstancePlugin):
+ """Collect all relevant assembly items
Collected data:
@@ -24,8 +24,8 @@ class CollectSetDress(pyblish.api.InstancePlugin):
"""
order = pyblish.api.CollectorOrder + 0.49
- label = "Set Dress"
- families = ["setdress"]
+ label = "Assembly"
+ families = ["assembly"]
def process(self, instance):
@@ -35,11 +35,14 @@ class CollectSetDress(pyblish.api.InstancePlugin):
# Get all content from the instance
instance_lookup = set(cmds.ls(instance, type="transform", long=True))
data = defaultdict(list)
+ self.log.info(instance_lookup)
hierarchy_nodes = []
for container in containers:
+ self.log.info(container)
root = lib.get_container_transforms(container, root=True)
+ self.log.info(root)
if not root or root not in instance_lookup:
continue
diff --git a/pype/plugins/maya/publish/collect_look.py b/pype/plugins/maya/publish/collect_look.py
index a0e0f53fea..dfefa15fe5 100644
--- a/pype/plugins/maya/publish/collect_look.py
+++ b/pype/plugins/maya/publish/collect_look.py
@@ -47,6 +47,8 @@ def get_look_attrs(node):
for attr in attrs:
if attr in SHAPE_ATTRS:
result.append(attr)
+ elif attr.startswith('ai'):
+ result.append(attr)
return result
@@ -157,6 +159,9 @@ def get_file_node_path(node):
if any(pattern in lower for pattern in patterns):
return texture_pattern
+ if cmds.nodeType(node) == 'aiImage':
+ return cmds.getAttr('{0}.filename'.format(node))
+
# otherwise use fileTextureName
return cmds.getAttr('{0}.fileTextureName'.format(node))
@@ -202,7 +207,7 @@ class CollectLook(pyblish.api.InstancePlugin):
"""
- order = pyblish.api.CollectorOrder + 0.4
+ order = pyblish.api.CollectorOrder + 0.2
families = ["look"]
label = "Collect Look"
hosts = ["maya"]
@@ -215,6 +220,7 @@ class CollectLook(pyblish.api.InstancePlugin):
# make ftrack publishable
instance.data["families"] = ['ftrack']
+ instance.data['maketx'] = True
def collect(self, instance):
@@ -261,15 +267,37 @@ class CollectLook(pyblish.api.InstancePlugin):
# Collect file nodes used by shading engines (if we have any)
files = list()
looksets = sets.keys()
+ shaderAttrs = [
+ "surfaceShader",
+ "volumeShader",
+ "displacementShader",
+ "aiSurfaceShader",
+ "aiVolumeShader"]
+ materials = list()
+
if looksets:
+ for look in looksets:
+ for at in shaderAttrs:
+ con = cmds.listConnections("{}.{}".format(look, at))
+ if con:
+ materials.extend(con)
+
+ self.log.info("Found materials:\n{}".format(materials))
+
self.log.info("Found the following sets:\n{}".format(looksets))
# Get the entire node chain of the look sets
- history = cmds.listHistory(looksets)
+ # history = cmds.listHistory(looksets)
+ history = list()
+ for material in materials:
+ history.extend(cmds.listHistory(material))
files = cmds.ls(history, type="file", long=True)
+ files.extend(cmds.ls(history, type="aiImage", long=True))
+ self.log.info("Collected file nodes:\n{}".format(files))
# Collect textures if any file nodes are found
instance.data["resources"] = [self.collect_resource(n)
for n in files]
+ self.log.info("Collected resources:\n{}".format(instance.data["resources"]))
# Log a warning when no relevant sets were retrieved for the look.
if not instance.data["lookData"]["relationships"]:
@@ -361,6 +389,8 @@ class CollectLook(pyblish.api.InstancePlugin):
# Collect changes to "custom" attributes
node_attrs = get_look_attrs(node)
+ self.log.info(node_attrs)
+
# Only include if there are any properties we care about
if not node_attrs:
continue
@@ -387,13 +417,21 @@ class CollectLook(pyblish.api.InstancePlugin):
dict
"""
- attribute = "{}.fileTextureName".format(node)
+ self.log.debug("processing: {}".format(node))
+ if cmds.nodeType(node) == 'file':
+ self.log.debug("file node")
+ attribute = "{}.fileTextureName".format(node)
+ computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+ elif cmds.nodeType(node) == 'aiImage':
+ self.log.debug("aiImage node")
+ attribute = "{}.filename".format(node)
+ computed_attribute = attribute
source = cmds.getAttr(attribute)
# Compare with the computed file path, e.g. the one with the
# pattern in it, to generate some logging information about this
# difference
- computed_attribute = "{}.computedFileTextureNamePattern".format(node)
+ # computed_attribute = "{}.computedFileTextureNamePattern".format(node)
computed_source = cmds.getAttr(computed_attribute)
if source != computed_source:
self.log.debug("Detected computed file pattern difference "
diff --git a/pype/plugins/maya/publish/collect_model.py b/pype/plugins/maya/publish/collect_model.py
index f8e25a9fc1..0fe5f29054 100644
--- a/pype/plugins/maya/publish/collect_model.py
+++ b/pype/plugins/maya/publish/collect_model.py
@@ -15,7 +15,7 @@ class CollectModelData(pyblish.api.InstancePlugin):
"""
- order = pyblish.api.CollectorOrder + 0.4
+ order = pyblish.api.CollectorOrder + 0.2
label = 'Collect Model Data'
families = ["model"]
diff --git a/pype/plugins/maya/publish/collect_renderlayers.py b/pype/plugins/maya/publish/collect_renderlayers.py
index c926baf803..d2c64e2117 100644
--- a/pype/plugins/maya/publish/collect_renderlayers.py
+++ b/pype/plugins/maya/publish/collect_renderlayers.py
@@ -20,7 +20,7 @@ class CollectMayaRenderlayers(pyblish.api.ContextPlugin):
# Get render globals node
try:
- render_globals = cmds.ls("renderglobalsDefault")[0]
+ render_globals = cmds.ls("renderglobalsMain")[0]
except IndexError:
self.log.info("Skipping renderlayer collection, no "
"renderGlobalsDefault found..")
diff --git a/pype/plugins/maya/publish/collect_review.py b/pype/plugins/maya/publish/collect_review.py
index b318a8de40..a0d434cafa 100644
--- a/pype/plugins/maya/publish/collect_review.py
+++ b/pype/plugins/maya/publish/collect_review.py
@@ -60,6 +60,7 @@ class CollectReview(pyblish.api.InstancePlugin):
data['step'] = instance.data['step']
data['fps'] = instance.data['fps']
cmds.setAttr(str(instance) + '.active', 0)
+ self.log.debug('data {}'.format(instance.context[i].data))
instance.context[i].data.update(data)
instance.data['remove'] = True
i += 1
diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py
index 1404fdc259..43b4c842e3 100644
--- a/pype/plugins/maya/publish/collect_scene.py
+++ b/pype/plugins/maya/publish/collect_scene.py
@@ -10,7 +10,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.1
- label = "Maya Scene"
+ label = "Maya Workfile"
hosts = ['maya']
def process(self, context):
@@ -29,14 +29,14 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
# create instance
instance = context.create_instance(name=filename)
- subset = 'scene' + task.capitalize()
+ subset = 'workfile' + task.capitalize()
data.update({
"subset": subset,
"asset": os.getenv("AVALON_ASSET", None),
"label": subset,
- "publish": True,
- "family": 'scene',
+ "publish": False,
+ "family": 'workfile',
"representation": "ma",
"setMembers": [current_file],
"stagingDir": folder
diff --git a/pype/plugins/maya/publish/extract_setdress.py b/pype/plugins/maya/publish/extract_assembly.py
similarity index 93%
rename from pype/plugins/maya/publish/extract_setdress.py
rename to pype/plugins/maya/publish/extract_assembly.py
index c4d613dc61..26b16a73c4 100644
--- a/pype/plugins/maya/publish/extract_setdress.py
+++ b/pype/plugins/maya/publish/extract_assembly.py
@@ -8,7 +8,7 @@ from pype.maya.lib import extract_alembic
from maya import cmds
-class ExtractSetDress(pype.api.Extractor):
+class ExtractAssembly(pype.api.Extractor):
"""Produce an alembic of just point positions and normals.
Positions and normals are preserved, but nothing more,
@@ -16,9 +16,9 @@ class ExtractSetDress(pype.api.Extractor):
"""
- label = "Extract Set Dress"
+ label = "Extract Assembly"
hosts = ["maya"]
- families = ["setdress"]
+ families = ["assembly"]
def process(self, instance):
diff --git a/pype/plugins/maya/publish/extract_look.py b/pype/plugins/maya/publish/extract_look.py
index a30b1fe7d5..f6fdda8593 100644
--- a/pype/plugins/maya/publish/extract_look.py
+++ b/pype/plugins/maya/publish/extract_look.py
@@ -2,16 +2,97 @@ import os
import json
import tempfile
import contextlib
+import subprocess
from collections import OrderedDict
from maya import cmds
import pyblish.api
import avalon.maya
+from avalon import io
import pype.api
import pype.maya.lib as lib
+# Modes for transfer
+COPY = 1
+HARDLINK = 2
+
+
+def source_hash(filepath, *args):
+ """Generate simple identifier for a source file.
+ This is used to identify whether a source file has previously been
+ processed into the pipeline, e.g. a texture.
+ The hash is based on source filepath, modification time and file size.
+ This is only used to identify whether a specific source file was already
+ published before from the same location with the same modification date.
+ We opt to do it this way as opposed to Avalanche C4 hash as this is much
+ faster and predictable enough for all our production use cases.
+ Args:
+ filepath (str): The source file path.
+ You can specify additional arguments in the function
+ to allow for specific 'processing' values to be included.
+ """
+ # We replace dots with comma because . cannot be a key in a pymongo dict.
+ file_name = os.path.basename(filepath)
+ time = str(os.path.getmtime(filepath))
+ size = str(os.path.getsize(filepath))
+ return "|".join([
+ file_name,
+ time,
+ size
+ ] + list(args)).replace(".", ",")
+
+
+def find_paths_by_hash(texture_hash):
+ # Find the texture hash key in the dictionary and all paths that
+ # originate from it.
+ key = "data.sourceHashes.{0}".format(texture_hash)
+ return io.distinct(key, {"type": "version"})
+
+
+def maketx(source, destination, *args):
+ """Make .tx using maketx with some default settings.
+ The settings are based on default as used in Arnold's
+ txManager in the scene.
+ This function requires the `maketx` executable to be
+ on the `PATH`.
+ Args:
+ source (str): Path to source file.
+ destination (str): Writing destination path.
+ """
+
+ cmd = [
+ "maketx",
+ "-v", # verbose
+ "-u", # update mode
+ # unpremultiply before conversion (recommended when alpha present)
+ "--unpremult",
+ # use oiio-optimized settings for tile-size, planarconfig, metadata
+ "--oiio"
+ ]
+ cmd.extend(args)
+ cmd.extend([
+ "-o", destination,
+ source
+ ])
+
+ CREATE_NO_WINDOW = 0x08000000
+ try:
+ out = subprocess.check_output(
+ cmd,
+ stderr=subprocess.STDOUT,
+ creationflags=CREATE_NO_WINDOW
+ )
+ except subprocess.CalledProcessError as exc:
+ print(exc)
+ print(exc.output)
+ import traceback
+ traceback.print_exc()
+ raise
+
+ return out
+
@contextlib.contextmanager
def no_workspace_dir():
@@ -79,12 +160,53 @@ class ExtractLook(pype.api.Extractor):
relationships = lookdata["relationships"]
sets = relationships.keys()
+ # Extract the textures to transfer, possibly convert with maketx and
+ # remap the node paths to the destination path. Note that a source
+ # might be included more than once amongst the resources as they could
+ # be the input file to multiple nodes.
resources = instance.data["resources"]
+ do_maketx = instance.data.get("maketx", False)
+ # Collect all unique files used in the resources
+ files = set()
+ for resource in resources:
+ files.update(os.path.normpath(f) for f in resource["files"])
+
+ # Process the resource files
+ transfers = list()
+ hardlinks = list()
+ hashes = dict()
+ for filepath in files:
+ source, mode, hash = self._process_texture(
+ filepath, do_maketx, staging=dir_path
+ )
+ destination = self.resource_destination(
+ instance, source, do_maketx
+ )
+ if mode == COPY:
+ transfers.append((source, destination))
+ elif mode == HARDLINK:
+ hardlinks.append((source, destination))
+
+ # Store the hashes from hash to destination to include in the
+ # database
+ hashes[hash] = destination
+
+ # Remap the resources to the destination path (change node attributes)
+ destinations = dict()
remap = OrderedDict() # needs to be ordered, see color space values
for resource in resources:
+ source = os.path.normpath(resource["source"])
+ if source not in destinations:
+ # Cache destination as source resource might be included
+ # multiple times
+ destinations[source] = self.resource_destination(
+ instance, source, do_maketx
+ )
+
+ # Remap file node filename to destination
attr = resource['attribute']
- remap[attr] = resource['destination']
+ remap[attr] = destinations[source]
# Preserve color space values (force value after filepath change)
# This will also trigger in the same order at end of context to
@@ -107,15 +229,17 @@ class ExtractLook(pype.api.Extractor):
with lib.attribute_values(remap):
with avalon.maya.maintained_selection():
cmds.select(sets, noExpand=True)
- cmds.file(maya_path,
- force=True,
- typ="mayaAscii",
- exportSelected=True,
- preserveReferences=False,
- channels=True,
- constraints=True,
- expressions=True,
- constructionHistory=True)
+ cmds.file(
+ maya_path,
+ force=True,
+ typ="mayaAscii",
+ exportSelected=True,
+ preserveReferences=False,
+ channels=True,
+ constraints=True,
+ expressions=True,
+ constructionHistory=True
+ )
# Write the JSON data
self.log.info("Extract json..")
@@ -127,9 +251,90 @@ class ExtractLook(pype.api.Extractor):
if "files" not in instance.data:
instance.data["files"] = list()
+ if "hardlinks" not in instance.data:
+ instance.data["hardlinks"] = list()
+ if "transfers" not in instance.data:
+ instance.data["transfers"] = list()
instance.data["files"].append(maya_fname)
instance.data["files"].append(json_fname)
- self.log.info("Extracted instance '%s' to: %s" % (instance.name,
- maya_path))
+ # Set up the resources transfers/links for the integrator
+ instance.data["transfers"].extend(transfers)
+ instance.data["hardlinks"].extend(hardlinks)
+
+ # Source hash for the textures
+ instance.data["sourceHashes"] = hashes
+
+ self.log.info("Extracted instance '%s' to: %s" % (
+ instance.name, maya_path)
+ )
+
+ def resource_destination(self, instance, filepath, do_maketx):
+
+ # Compute destination location
+ basename, ext = os.path.splitext(os.path.basename(filepath))
+
+ # If maketx then the texture will always end with .tx
+ if do_maketx:
+ ext = ".tx"
+
+ return os.path.join(
+ instance.data["assumedDestination"],
+ "resources",
+ basename + ext
+ )
+
+ def _process_texture(self, filepath, do_maketx, staging):
+ """Process a single texture file on disk for publishing.
+ This will:
+ 1. Check whether it's already published, if so it will do hardlink
+ 2. If not published and maketx is enabled, generate a new .tx file.
+ 3. Compute the destination path for the source file.
+ Args:
+ filepath (str): The source file path to process.
+ do_maketx (bool): Whether to produce a .tx file
+ Returns:
+ """
+
+ fname, ext = os.path.splitext(os.path.basename(filepath))
+
+ args = []
+ if do_maketx:
+ args.append("maketx")
+ texture_hash = source_hash(filepath, *args)
+
+ # If source has been published before with the same settings,
+ # then don't reprocess but hardlink from the original
+ existing = find_paths_by_hash(texture_hash)
+ if existing:
+ self.log.info("Found hash in database, preparing hardlink..")
+ source = next((p for p in existing if os.path.exists(p)), None)
+ if source:
+ return source, HARDLINK, texture_hash
+ else:
+ self.log.warning(
+ "Paths not found on disk, "
+ "skipping hardlink: %s" % (existing,)
+ )
+
+ if do_maketx and ext != ".tx":
+ # Produce .tx file in staging if source file is not .tx
+ converted = os.path.join(
+ staging,
+ "resources",
+ fname + ".tx"
+ )
+
+ # Ensure folder exists
+ if not os.path.exists(os.path.dirname(converted)):
+ os.makedirs(os.path.dirname(converted))
+
+ self.log.info("Generating .tx file for %s .." % filepath)
+ maketx(filepath, converted,
+ # Include `source-hash` as string metadata
+ "-sattrib", "sourceHash", texture_hash)
+
+ return converted, COPY, texture_hash
+
+ return filepath, COPY, texture_hash
diff --git a/pype/plugins/maya/publish/extract_maya_ascii_raw.py b/pype/plugins/maya/publish/extract_maya_ascii_raw.py
index ba5d25cc88..70c6f246f6 100644
--- a/pype/plugins/maya/publish/extract_maya_ascii_raw.py
+++ b/pype/plugins/maya/publish/extract_maya_ascii_raw.py
@@ -15,7 +15,9 @@ class ExtractMayaAsciiRaw(pype.api.Extractor):
label = "Maya ASCII (Raw)"
hosts = ["maya"]
- families = ["mayaAscii"]
+ families = ["mayaAscii",
+ "setdress",
+ "layout"]
def process(self, instance):
diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py
index 84f83c8e92..0a97a9b98f 100644
--- a/pype/plugins/maya/publish/submit_maya_deadline.py
+++ b/pype/plugins/maya/publish/submit_maya_deadline.py
@@ -92,7 +92,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit available render layers to Deadline
Renders are submitted to a Deadline Web Service as
- supplied via the environment variable AVALON_DEADLINE
+ supplied via the environment variable DEADLINE_REST_URL
"""
@@ -104,9 +104,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
def process(self, instance):
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
"http://localhost:8082")
- assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+ assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
context = instance.context
workspace = context.data["workspaceDir"]
@@ -231,14 +231,15 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"MAYA_MODULE_PATH",
"ARNOLD_PLUGIN_PATH",
"AVALON_SCHEMA",
+ "FTRACK_API_KEY",
+ "FTRACK_API_USER",
+ "FTRACK_SERVER",
+ "PYBLISHPLUGINPATH",
# todo: This is a temporary fix for yeti variables
"PEREGRINEL_LICENSE",
- "REDSHIFT_MAYAEXTENSIONSPATH",
- "REDSHIFT_DISABLEOUTPUTLOCKFILES"
- "VRAY_FOR_MAYA2018_PLUGINS_X64",
- "VRAY_PLUGINS_X64",
- "VRAY_USE_THREAD_AFFINITY",
+ "SOLIDANGLE_LICENSE",
+ "ARNOLD_LICENSE"
"MAYA_MODULE_PATH",
"TOOL_ENV"
]
@@ -274,6 +275,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
clean_path += os.path.normpath(path) + os.pathsep
except UnicodeDecodeError:
print('path contains non UTF characters')
+
+ if key == "PYTHONPATH":
+ clean_path = clean_path.replace('python2', 'python3')
clean_path = clean_path.replace(
os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
os.path.normpath(environment['PYPE_STUDIO_CORE']))
@@ -301,7 +305,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
- url = "{}/api/jobs".format(AVALON_DEADLINE)
+ url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
diff --git a/pype/plugins/maya/publish/submit_vray_deadline.py b/pype/plugins/maya/publish/submit_vray_deadline.py
index d43e92c6d8..16625e73f7 100644
--- a/pype/plugins/maya/publish/submit_vray_deadline.py
+++ b/pype/plugins/maya/publish/submit_vray_deadline.py
@@ -28,13 +28,13 @@ class VraySubmitDeadline(pyblish.api.InstancePlugin):
def process(self, instance):
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
"http://localhost:8082")
- assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+ assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
context = instance.context
- deadline_url = "{}/api/jobs".format(AVALON_DEADLINE)
+ deadline_url = "{}/api/jobs".format(DEADLINE_REST_URL)
deadline_user = context.data.get("deadlineUser", getpass.getuser())
filepath = context.data["currentFile"]
diff --git a/pype/plugins/maya/publish/validate_setdress_namespaces.py b/pype/plugins/maya/publish/validate_assembly_namespaces.py
similarity index 87%
rename from pype/plugins/maya/publish/validate_setdress_namespaces.py
rename to pype/plugins/maya/publish/validate_assembly_namespaces.py
index 0d3b75ca37..f4519e9f4e 100644
--- a/pype/plugins/maya/publish/validate_setdress_namespaces.py
+++ b/pype/plugins/maya/publish/validate_assembly_namespaces.py
@@ -3,7 +3,7 @@ import pype.api
import pype.maya.action
-class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
+class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin):
"""Ensure namespaces are not nested
In the outliner an item in a normal namespace looks as following:
@@ -15,9 +15,9 @@ class ValidateSetdressNamespaces(pyblish.api.InstancePlugin):
"""
- label = "Validate Setdress Namespaces"
+ label = "Validate Assembly Namespaces"
order = pyblish.api.ValidatorOrder
- families = ["setdress"]
+ families = ["assembly"]
actions = [pype.maya.action.SelectInvalidAction]
def process(self, instance):
diff --git a/pype/plugins/maya/publish/validate_setdress_transforms.py b/pype/plugins/maya/publish/validate_assembly_transforms.py
similarity index 94%
rename from pype/plugins/maya/publish/validate_setdress_transforms.py
rename to pype/plugins/maya/publish/validate_assembly_transforms.py
index 918f4670be..9d46655cdc 100644
--- a/pype/plugins/maya/publish/validate_setdress_transforms.py
+++ b/pype/plugins/maya/publish/validate_assembly_transforms.py
@@ -6,7 +6,7 @@ from maya import cmds
import pype.maya.action
-class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
+class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin):
"""Verify only root nodes of the loaded asset have transformations.
Note: This check is temporary and is subject to change.
@@ -26,8 +26,8 @@ class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder + 0.49
- label = "Setdress Model Transforms"
- families = ["setdress"]
+ label = "Assembly Model Transforms"
+ families = ["assembly"]
actions = [pype.maya.action.SelectInvalidAction,
pype.api.RepairAction]
@@ -38,7 +38,7 @@ class ValidateSetDressModelTransforms(pyblish.api.InstancePlugin):
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError("Found {} invalid transforms of setdress "
+ raise RuntimeError("Found {} invalid transforms of assembly "
"items".format(len(invalid)))
@classmethod
diff --git a/pype/plugins/maya/publish/validate_color_sets.py b/pype/plugins/maya/publish/validate_color_sets.py
new file mode 100644
index 0000000000..8dd78bbc6a
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_color_sets.py
@@ -0,0 +1,54 @@
+from maya import cmds
+
+import pyblish.api
+import pype.api
+import pype.maya.action
+
+
+class ValidateColorSets(pyblish.api.Validator):
+ """Validate all meshes in the instance have unlocked normals
+
+ These can be removed manually through:
+ Modeling > Mesh Display > Color Sets Editor
+
+ """
+
+ order = pype.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['model']
+ category = 'geometry'
+ label = 'Mesh ColorSets'
+ actions = [pype.maya.action.SelectInvalidAction,
+ pype.api.RepairAction]
+ optional = True
+
+ @staticmethod
+ def has_color_sets(mesh):
+ """Return whether a mesh node has locked normals"""
+ return cmds.polyColorSet(mesh,
+ allColorSets=True,
+ query=True)
+
+ @classmethod
+ def get_invalid(cls, instance):
+ """Return the meshes with ColorSets in instance"""
+
+ meshes = cmds.ls(instance, type='mesh', long=True)
+ return [mesh for mesh in meshes if cls.has_color_sets(mesh)]
+
+ def process(self, instance):
+ """Raise invalid when any of the meshes have ColorSets"""
+
+ invalid = self.get_invalid(instance)
+
+ if invalid:
+ raise ValueError("Meshes found with "
+ "Color Sets: {0}".format(invalid))
+
+ @classmethod
+ def repair(cls, instance):
+ """Remove all Color Sets on the meshes in this instance."""
+ invalid = cls.get_invalid(instance)
+ for mesh in invalid:
+ for color_set in cmds.polyColorSet(mesh, acs=True, q=True):
+ cmds.polyColorSet(mesh, colorSet=color_set, delete=True)
diff --git a/pype/plugins/maya/publish/validate_deadline_connection.py b/pype/plugins/maya/publish/validate_deadline_connection.py
index 66ae32f024..b89e3c9b2e 100644
--- a/pype/plugins/maya/publish/validate_deadline_connection.py
+++ b/pype/plugins/maya/publish/validate_deadline_connection.py
@@ -1,6 +1,5 @@
import pyblish.api
-import avalon.api as api
from avalon.vendor import requests
from pype.plugin import contextplugin_should_run
import os
@@ -20,12 +19,13 @@ class ValidateDeadlineConnection(pyblish.api.ContextPlugin):
return
try:
- AVALON_DEADLINE = os.environ["AVALON_DEADLINE"]
+ DEADLINE_REST_URL = os.environ["DEADLINE_REST_URL"]
except KeyError:
self.log.error("Deadline REST API url not found.")
+ raise ValueError("Deadline REST API url not found.")
# Check response
- response = requests.get(AVALON_DEADLINE)
+ response = requests.get(DEADLINE_REST_URL)
assert response.ok, "Response must be ok"
assert response.text.startswith("Deadline Web Service "), (
"Web service did not respond with 'Deadline Web Service'"
diff --git a/pype/plugins/maya/publish/validate_look_sets.py b/pype/plugins/maya/publish/validate_look_sets.py
index f3952b53a4..cfa499c763 100644
--- a/pype/plugins/maya/publish/validate_look_sets.py
+++ b/pype/plugins/maya/publish/validate_look_sets.py
@@ -73,11 +73,13 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
# check if any objectSets are not present ion the relationships
missing_sets = [s for s in sets if s not in relationships]
if missing_sets:
- # A set of this node is not coming along, this is wrong!
- cls.log.error("Missing sets '{}' for node "
- "'{}'".format(missing_sets, node))
- invalid.append(node)
- continue
+ for missing_set in missing_sets:
+ if '_SET' not in missing_set:
+ # A set of this node is not coming along, this is wrong!
+ cls.log.error("Missing sets '{}' for node "
+ "'{}'".format(missing_sets, node))
+ invalid.append(node)
+ continue
# Ensure the node is in the sets that are collected
for shaderset, data in relationships.items():
diff --git a/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py
new file mode 100644
index 0000000000..3aae97b8fd
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_mesh_overlapping_uvs.py
@@ -0,0 +1,277 @@
+from maya import cmds
+
+import pyblish.api
+import pype.api
+import pype.maya.action
+import math
+import maya.api.OpenMaya as om
+from pymel.core import polyUVSet
+
+
+class GetOverlappingUVs(object):
+
+ def _createBoundingCircle(self, meshfn):
+ """ Represent a face by center and radius
+
+ :param meshfn: MFnMesh class
+ :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+ :returns: (center, radius)
+ :rtype: tuple
+ """
+ center = []
+ radius = []
+ for i in xrange(meshfn.numPolygons): # noqa: F821
+ # get uvs from face
+ uarray = []
+ varray = []
+ for j in range(len(meshfn.getPolygonVertices(i))):
+ uv = meshfn.getPolygonUV(i, j)
+ uarray.append(uv[0])
+ varray.append(uv[1])
+
+ # loop through all vertices to construct edges/rays
+ cu = 0.0
+ cv = 0.0
+ for j in range(len(uarray)):
+ cu += uarray[j]
+ cv += varray[j]
+
+ cu /= len(uarray)
+ cv /= len(varray)
+ rsqr = 0.0
+ for j in range(len(varray)):
+ du = uarray[j] - cu
+ dv = varray[j] - cv
+ dsqr = du * du + dv * dv
+ rsqr = dsqr if dsqr > rsqr else rsqr
+
+ center.append(cu)
+ center.append(cv)
+ radius.append(math.sqrt(rsqr))
+
+ return center, radius
+
+ def _createRayGivenFace(self, meshfn, faceId):
+ """ Represent a face by a series of edges(rays), i.e.
+
+ :param meshfn: MFnMesh class
+ :type meshfn: :class:`maya.api.OpenMaya.MFnMesh`
+ :param faceId: face id
+ :type faceId: int
+ :returns: False if no valid uv's.
+ ""(True, orig, vec)"" or ""(False, None, None)""
+ :rtype: tuple
+
+ .. code-block:: python
+
+ orig = [orig1u, orig1v, orig2u, orig2v, ... ]
+ vec = [vec1u, vec1v, vec2u, vec2v, ... ]
+ """
+ orig = []
+ vec = []
+ # get uvs
+ uarray = []
+ varray = []
+ for i in range(len(meshfn.getPolygonVertices(faceId))):
+ uv = meshfn.getPolygonUV(faceId, i)
+ uarray.append(uv[0])
+ varray.append(uv[1])
+
+ if len(uarray) == 0 or len(varray) == 0:
+ return (False, None, None)
+
+ # loop throught all vertices to construct edges/rays
+ u = uarray[-1]
+ v = varray[-1]
+ for i in xrange(len(uarray)): # noqa: F821
+ orig.append(uarray[i])
+ orig.append(varray[i])
+ vec.append(u - uarray[i])
+ vec.append(v - varray[i])
+ u = uarray[i]
+ v = varray[i]
+
+ return (True, orig, vec)
+
+ def _checkCrossingEdges(self,
+ face1Orig,
+ face1Vec,
+ face2Orig,
+ face2Vec):
+ """ Check if there are crossing edges between two faces.
+ Return True if there are crossing edges and False otherwise.
+
+ :param face1Orig: origin of face 1
+ :type face1Orig: tuple
+ :param face1Vec: face 1 edges
+ :type face1Vec: list
+ :param face2Orig: origin of face 2
+ :type face2Orig: tuple
+ :param face2Vec: face 2 edges
+ :type face2Vec: list
+
+ A face is represented by a series of edges(rays), i.e.
+ .. code-block:: python
+
+ faceOrig[] = [orig1u, orig1v, orig2u, orig2v, ... ]
+ faceVec[] = [vec1u, vec1v, vec2u, vec2v, ... ]
+ """
+ face1Size = len(face1Orig)
+ face2Size = len(face2Orig)
+ for i in xrange(0, face1Size, 2): # noqa: F821
+ o1x = face1Orig[i]
+ o1y = face1Orig[i+1]
+ v1x = face1Vec[i]
+ v1y = face1Vec[i+1]
+ n1x = v1y
+ n1y = -v1x
+ for j in xrange(0, face2Size, 2): # noqa: F821
+ # Given ray1(O1, V1) and ray2(O2, V2)
+ # Normal of ray1 is (V1.y, V1.x)
+ o2x = face2Orig[j]
+ o2y = face2Orig[j+1]
+ v2x = face2Vec[j]
+ v2y = face2Vec[j+1]
+ n2x = v2y
+ n2y = -v2x
+
+ # Find t for ray2
+ # t = [(o1x-o2x)n1x + (o1y-o2y)n1y] /
+ # (v2x * n1x + v2y * n1y)
+ denum = v2x * n1x + v2y * n1y
+ # Edges are parallel if denum is close to 0.
+ if math.fabs(denum) < 0.000001:
+ continue
+ t2 = ((o1x-o2x) * n1x + (o1y-o2y) * n1y) / denum
+ if (t2 < 0.00001 or t2 > 0.99999):
+ continue
+
+ # Find t for ray1
+ # t = [(o2x-o1x)n2x
+ # + (o2y-o1y)n2y] / (v1x * n2x + v1y * n2y)
+ denum = v1x * n2x + v1y * n2y
+ # Edges are parallel if denum is close to 0.
+ if math.fabs(denum) < 0.000001:
+ continue
+ t1 = ((o2x-o1x) * n2x + (o2y-o1y) * n2y) / denum
+
+ # Edges intersect
+ if (t1 > 0.00001 and t1 < 0.99999):
+ return 1
+
+ return 0
+
+ def _getOverlapUVFaces(self, meshName):
+ """ Return overlapping faces
+
+ :param meshName: name of mesh
+ :type meshName: str
+ :returns: list of overlapping faces
+ :rtype: list
+ """
+ faces = []
+ # find polygon mesh node
+ selList = om.MSelectionList()
+ selList.add(meshName)
+ mesh = selList.getDependNode(0)
+ if mesh.apiType() == om.MFn.kTransform:
+ dagPath = selList.getDagPath(0)
+ dagFn = om.MFnDagNode(dagPath)
+ child = dagFn.child(0)
+ if child.apiType() != om.MFn.kMesh:
+ raise Exception("Can't find polygon mesh")
+ mesh = child
+ meshfn = om.MFnMesh(mesh)
+
+ center, radius = self._createBoundingCircle(meshfn)
+ for i in xrange(meshfn.numPolygons): # noqa: F821
+ rayb1, face1Orig, face1Vec = self._createRayGivenFace(
+ meshfn, i)
+ if not rayb1:
+ continue
+ cui = center[2*i]
+ cvi = center[2*i+1]
+ ri = radius[i]
+ # Exclude the degenerate face
+ # if(area(face1Orig) < 0.000001) continue;
+ # Loop through face j where j != i
+ for j in range(i+1, meshfn.numPolygons):
+ cuj = center[2*j]
+ cvj = center[2*j+1]
+ rj = radius[j]
+ du = cuj - cui
+ dv = cvj - cvi
+ dsqr = du * du + dv * dv
+ # Quick rejection if bounding circles don't overlap
+ if (dsqr >= (ri + rj) * (ri + rj)):
+ continue
+
+ rayb2, face2Orig, face2Vec = self._createRayGivenFace(
+ meshfn, j)
+ if not rayb2:
+ continue
+ # Exclude the degenerate face
+ # if(area(face2Orig) < 0.000001): continue;
+ if self._checkCrossingEdges(face1Orig,
+ face1Vec,
+ face2Orig,
+ face2Vec):
+ face1 = '%s.f[%d]' % (meshfn.name(), i)
+ face2 = '%s.f[%d]' % (meshfn.name(), j)
+ if face1 not in faces:
+ faces.append(face1)
+ if face2 not in faces:
+ faces.append(face2)
+ return faces
+
+
+class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
+ """ Validate the current mesh overlapping UVs.
+
+ It validates whether the current UVs are overlapping or not.
+ It is optional to warn publisher about it.
+ """
+
+ order = pype.api.ValidateMeshOrder
+ hosts = ['maya']
+ families = ['model']
+ category = 'geometry'
+ label = 'Mesh Has Overlapping UVs'
+ actions = [pype.maya.action.SelectInvalidAction]
+ optional = True
+
+ @classmethod
+ def _has_overlapping_uvs(cls, node):
+ """ Check if mesh has overlapping UVs.
+
+ :param node: node to check
+ :type node: str
+ :returns: True is has overlapping UVs, False otherwise
+ :rtype: bool
+ """
+ ovl = GetOverlappingUVs()
+
+ for i, uv in enumerate(polyUVSet(node, q=1, auv=1)):
+ polyUVSet(node, cuv=1, uvSet=uv)
+ of = ovl._getOverlapUVFaces(str(node))
+ if of != []:
+ return True
+ return False
+
+ @classmethod
+ def get_invalid(cls, instance):
+ invalid = []
+
+ for node in cmds.ls(instance, type='mesh'):
+ if cls._has_overlapping_uvs(node):
+ invalid.append(node)
+
+ return invalid
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise RuntimeError("Meshes found with overlapping "
+ "UVs: {0}".format(invalid))
+ pass
diff --git a/pype/plugins/maya/publish/validate_node_ids.py b/pype/plugins/maya/publish/validate_node_ids.py
index c3b989f282..9102ef13ed 100644
--- a/pype/plugins/maya/publish/validate_node_ids.py
+++ b/pype/plugins/maya/publish/validate_node_ids.py
@@ -22,8 +22,8 @@ class ValidateNodeIDs(pyblish.api.InstancePlugin):
"rig",
"pointcache",
"animation",
- "setdress",
- "yetiRig"]
+ "yetiRig",
+ "assembly"]
actions = [pype.maya.action.SelectInvalidAction,
pype.maya.action.GenerateUUIDsOnInvalidAction]
diff --git a/pype/plugins/maya/publish/validate_resources.py b/pype/plugins/maya/publish/validate_resources.py
index bc10d3003c..47a94e7529 100644
--- a/pype/plugins/maya/publish/validate_resources.py
+++ b/pype/plugins/maya/publish/validate_resources.py
@@ -1,8 +1,9 @@
+import os
+from collections import defaultdict
+
import pyblish.api
import pype.api
-import os
-
class ValidateResources(pyblish.api.InstancePlugin):
"""Validates mapped resources.
@@ -12,18 +13,45 @@ class ValidateResources(pyblish.api.InstancePlugin):
media.
This validates:
- - The resources are existing files.
- - The resources have correctly collected the data.
+ - The resources have unique filenames (without extension)
"""
order = pype.api.ValidateContentsOrder
- label = "Resources"
+ label = "Resources Unique"
def process(self, instance):
- for resource in instance.data.get('resources', []):
- # Required data
- assert "source" in resource, "No source found"
- assert "files" in resource, "No files from source"
- assert all(os.path.exists(f) for f in resource['files'])
+ resources = instance.data.get("resources", [])
+ if not resources:
+ self.log.debug("No resources to validate..")
+ return
+
+ basenames = defaultdict(set)
+
+ for resource in resources:
+ files = resource.get("files", [])
+ for filename in files:
+
+ # Use normalized paths in comparison and ignore case
+ # sensitivity
+ filename = os.path.normpath(filename).lower()
+
+ basename = os.path.splitext(os.path.basename(filename))[0]
+ basenames[basename].add(filename)
+
+ invalid_resources = list()
+ for basename, sources in basenames.items():
+ if len(sources) > 1:
+ invalid_resources.extend(sources)
+
+ self.log.error(
+ "Non-unique resource name: {0}"
+ "{0} (sources: {1})".format(
+ basename,
+ list(sources)
+ )
+ )
+
+ if invalid_resources:
+ raise RuntimeError("Invalid resources in instance.")
diff --git a/pype/plugins/maya/publish/validate_transfers.py b/pype/plugins/maya/publish/validate_transfers.py
deleted file mode 100644
index 3234b2240e..0000000000
--- a/pype/plugins/maya/publish/validate_transfers.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import pyblish.api
-import pype.api
-import os
-
-from collections import defaultdict
-
-
-class ValidateTransfers(pyblish.api.InstancePlugin):
- """Validates mapped resources.
-
- This validates:
- - The resources all transfer to a unique destination.
-
- """
-
- order = pype.api.ValidateContentsOrder
- label = "Transfers"
-
- def process(self, instance):
-
- transfers = instance.data.get("transfers", [])
- if not transfers:
- return
-
- # Collect all destination with its sources
- collected = defaultdict(set)
- for source, destination in transfers:
-
- # Use normalized paths in comparison and ignore case sensitivity
- source = os.path.normpath(source).lower()
- destination = os.path.normpath(destination).lower()
-
- collected[destination].add(source)
-
- invalid_destinations = list()
- for destination, sources in collected.items():
- if len(sources) > 1:
- invalid_destinations.append(destination)
-
- self.log.error("Non-unique file transfer for resources: "
- "{0} (sources: {1})".format(destination,
- list(sources)))
-
- if invalid_destinations:
- raise RuntimeError("Invalid transfers in queue.")
diff --git a/pype/plugins/nuke/_publish_unused/submit_deadline.py b/pype/plugins/nuke/_publish_unused/submit_deadline.py
index ffb298f75d..b5476876e0 100644
--- a/pype/plugins/nuke/_publish_unused/submit_deadline.py
+++ b/pype/plugins/nuke/_publish_unused/submit_deadline.py
@@ -13,7 +13,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit current Comp to Deadline
Renders are submitted to a Deadline Web Service as
- supplied via the environment variable AVALON_DEADLINE
+ supplied via the environment variable DEADLINE_REST_URL
"""
@@ -32,9 +32,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
else:
context.data[key] = True
- AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
+ DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
"http://localhost:8082")
- assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
+ assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
# Collect all saver instances in context that are to be rendered
write_instances = []
@@ -137,7 +137,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
# E.g. http://192.168.0.1:8082/api/jobs
- url = "{}/api/jobs".format(AVALON_DEADLINE)
+ url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
diff --git a/pype/plugins/nuke/load/load_sequence.py b/pype/plugins/nuke/load/load_sequence.py
index a4a591e657..b80d1a0ca1 100644
--- a/pype/plugins/nuke/load/load_sequence.py
+++ b/pype/plugins/nuke/load/load_sequence.py
@@ -101,7 +101,7 @@ class LoadSequence(api.Loader):
if namespace is None:
namespace = context['asset']['name']
- file = self.fname
+ file = self.fname.replace("\\", "/")
log.info("file: {}\n".format(self.fname))
read_name = "Read_" + context["representation"]["context"]["subset"]
@@ -112,7 +112,7 @@ class LoadSequence(api.Loader):
r = nuke.createNode(
"Read",
"name {}".format(read_name))
- r["file"].setValue(self.fname)
+ r["file"].setValue(file)
# Set colorspace defined in version data
colorspace = context["version"]["data"].get("colorspace", None)
diff --git a/pype/templates.py b/pype/templates.py
index d7748145ee..92a0e2c3c7 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -1,5 +1,6 @@
import os
import re
+import sys
from avalon import io
from avalon import api as avalon
from . import lib
@@ -7,9 +8,14 @@ from app.api import (Templates, Logger, format)
log = Logger.getLogger(__name__,
os.getenv("AVALON_APP", "pype-config"))
-SESSION = avalon.session
-if not SESSION:
+
+self = sys.modules[__name__]
+self.SESSION = None
+
+
+def set_session():
lib.set_io_database()
+ self.SESSION = avalon.session
def load_data_from_templates():
@@ -101,7 +107,9 @@ def set_project_code(code):
os.environ[KEY]: project code
avalon.sesion[KEY]: project code
"""
- SESSION["AVALON_PROJECTCODE"] = code
+ if self.SESSION is None:
+ set_session()
+ self.SESSION["AVALON_PROJECTCODE"] = code
os.environ["AVALON_PROJECTCODE"] = code
@@ -113,8 +121,9 @@ def get_project_name():
string: project name
"""
-
- project_name = SESSION.get("AVALON_PROJECT", None) \
+ if self.SESSION is None:
+ set_session()
+ project_name = self.SESSION.get("AVALON_PROJECT", None) \
or os.getenv("AVALON_PROJECT", None)
assert project_name, log.error("missing `AVALON_PROJECT`"
"in avalon session "
@@ -132,7 +141,9 @@ def get_asset():
Raises:
log: error
"""
- asset = SESSION.get("AVALON_ASSET", None) \
+ if self.SESSION is None:
+ set_session()
+ asset = self.SESSION.get("AVALON_ASSET", None) \
or os.getenv("AVALON_ASSET", None)
log.info("asset: {}".format(asset))
assert asset, log.error("missing `AVALON_ASSET`"
@@ -151,7 +162,9 @@ def get_task():
Raises:
log: error
"""
- task = SESSION.get("AVALON_TASK", None) \
+ if self.SESSION is None:
+ set_session()
+ task = self.SESSION.get("AVALON_TASK", None) \
or os.getenv("AVALON_TASK", None)
assert task, log.error("missing `AVALON_TASK`"
"in avalon session "
@@ -186,7 +199,9 @@ def set_hierarchy(hierarchy):
Args:
hierarchy (string): hierarchy path ("silo/folder/seq")
"""
- SESSION["AVALON_HIERARCHY"] = hierarchy
+ if self.SESSION is None:
+ set_session()
+ self.SESSION["AVALON_HIERARCHY"] = hierarchy
os.environ["AVALON_HIERARCHY"] = hierarchy
@@ -236,8 +251,10 @@ def set_avalon_workdir(project=None,
avalon.session[AVALON_WORKDIR]: workdir path
"""
- awd = SESSION.get("AVALON_WORKDIR", None) \
- or os.getenv("AVALON_WORKDIR", None)
+ if self.SESSION is None:
+ set_session()
+
+ awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
@@ -245,7 +262,7 @@ def set_avalon_workdir(project=None,
awd_filled = os.path.normpath(format(awd, data))
- SESSION["AVALON_WORKDIR"] = awd_filled
+ self.SESSION["AVALON_WORKDIR"] = awd_filled
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
diff --git a/pype/tools/__init__.py b/pype/tools/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pype/tools/assetcreator/__init__.py b/pype/tools/assetcreator/__init__.py
new file mode 100644
index 0000000000..3b88ebe984
--- /dev/null
+++ b/pype/tools/assetcreator/__init__.py
@@ -0,0 +1,10 @@
+
+from .app import (
+ show,
+ cli
+)
+
+__all__ = [
+ "show",
+ "cli",
+]
diff --git a/pype/tools/assetcreator/__main__.py b/pype/tools/assetcreator/__main__.py
new file mode 100644
index 0000000000..d77bc585c5
--- /dev/null
+++ b/pype/tools/assetcreator/__main__.py
@@ -0,0 +1,5 @@
+from . import cli
+
+if __name__ == '__main__':
+ import sys
+ sys.exit(cli(sys.argv[1:]))
diff --git a/pype/tools/assetcreator/app.py b/pype/tools/assetcreator/app.py
new file mode 100644
index 0000000000..6f0effbf5f
--- /dev/null
+++ b/pype/tools/assetcreator/app.py
@@ -0,0 +1,634 @@
+import os
+import sys
+import json
+from subprocess import Popen
+try:
+ import ftrack_api_old as ftrack_api
+except Exception:
+ import ftrack_api
+from pype import lib as pypelib
+from avalon.vendor.Qt import QtWidgets, QtCore
+from avalon import io, api, style, schema
+from avalon.tools import lib as parentlib
+from . import widget, model
+
+module = sys.modules[__name__]
+module.window = None
+
+
+class Window(QtWidgets.QDialog):
+ """Asset creator interface
+
+ """
+
+ def __init__(self, parent=None, context=None):
+ super(Window, self).__init__(parent)
+ self.context = context
+ project_name = io.active_project()
+ self.setWindowTitle("Asset creator ({0})".format(project_name))
+ self.setFocusPolicy(QtCore.Qt.StrongFocus)
+ self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
+
+ # Validators
+ self.valid_parent = False
+
+ self.session = None
+
+ # assets widget
+ assets_widget = QtWidgets.QWidget()
+ assets_widget.setContentsMargins(0, 0, 0, 0)
+ assets_layout = QtWidgets.QVBoxLayout(assets_widget)
+ assets = widget.AssetWidget()
+ assets.view.setSelectionMode(assets.view.ExtendedSelection)
+ assets_layout.addWidget(assets)
+
+ # Outlink
+ label_outlink = QtWidgets.QLabel("Outlink:")
+ input_outlink = QtWidgets.QLineEdit()
+ input_outlink.setReadOnly(True)
+ input_outlink.setStyleSheet("background-color: #333333;")
+ checkbox_outlink = QtWidgets.QCheckBox("Use outlink")
+ # Parent
+ label_parent = QtWidgets.QLabel("Parent:")
+ input_parent = QtWidgets.QLineEdit()
+ input_parent.setReadOnly(True)
+ input_parent.setStyleSheet("background-color: #333333;")
+
+ # Name
+ label_name = QtWidgets.QLabel("Name:")
+ input_name = QtWidgets.QLineEdit()
+ input_name.setPlaceholderText("")
+
+ # Asset Build
+ label_assetbuild = QtWidgets.QLabel("Asset Build:")
+ combo_assetbuilt = QtWidgets.QComboBox()
+
+ # Task template
+ label_task_template = QtWidgets.QLabel("Task template:")
+ combo_task_template = QtWidgets.QComboBox()
+
+ # Info widget
+ info_widget = QtWidgets.QWidget()
+ info_widget.setContentsMargins(10, 10, 10, 10)
+ info_layout = QtWidgets.QVBoxLayout(info_widget)
+
+ # Inputs widget
+ inputs_widget = QtWidgets.QWidget()
+ inputs_widget.setContentsMargins(0, 0, 0, 0)
+
+ inputs_layout = QtWidgets.QFormLayout(inputs_widget)
+ inputs_layout.addRow(label_outlink, input_outlink)
+ inputs_layout.addRow(None, checkbox_outlink)
+ inputs_layout.addRow(label_parent, input_parent)
+ inputs_layout.addRow(label_name, input_name)
+ inputs_layout.addRow(label_assetbuild, combo_assetbuilt)
+ inputs_layout.addRow(label_task_template, combo_task_template)
+
+ # Add button
+ btns_widget = QtWidgets.QWidget()
+ btns_widget.setContentsMargins(0, 0, 0, 0)
+ btn_layout = QtWidgets.QHBoxLayout(btns_widget)
+ btn_create_asset = QtWidgets.QPushButton("Create asset")
+ btn_create_asset.setToolTip(
+ "Creates all neccessary components for asset"
+ )
+ checkbox_app = None
+ if self.context is not None:
+ checkbox_app = QtWidgets.QCheckBox("Open {}".format(
+ self.context.capitalize())
+ )
+ btn_layout.addWidget(checkbox_app)
+ btn_layout.addWidget(btn_create_asset)
+
+ task_view = QtWidgets.QTreeView()
+ task_view.setIndentation(0)
+ task_model = model.TasksTemplateModel()
+ task_view.setModel(task_model)
+
+ info_layout.addWidget(inputs_widget)
+ info_layout.addWidget(task_view)
+ info_layout.addWidget(btns_widget)
+
+ # Body
+ body = QtWidgets.QSplitter()
+ body.setContentsMargins(0, 0, 0, 0)
+ body.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
+ QtWidgets.QSizePolicy.Expanding)
+ body.setOrientation(QtCore.Qt.Horizontal)
+ body.addWidget(assets_widget)
+ body.addWidget(info_widget)
+ body.setStretchFactor(0, 100)
+ body.setStretchFactor(1, 150)
+
+ # statusbar
+ message = QtWidgets.QLabel()
+ message.setFixedHeight(20)
+
+ statusbar = QtWidgets.QWidget()
+ layout = QtWidgets.QHBoxLayout(statusbar)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.addWidget(message)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(body)
+ layout.addWidget(statusbar)
+
+ self.data = {
+ "label": {
+ "message": message,
+ },
+ "view": {
+ "tasks": task_view
+ },
+ "model": {
+ "assets": assets,
+ "tasks": task_model
+ },
+ "inputs": {
+ "outlink": input_outlink,
+ "outlink_cb": checkbox_outlink,
+ "parent": input_parent,
+ "name": input_name,
+ "assetbuild": combo_assetbuilt,
+ "tasktemplate": combo_task_template,
+ "open_app": checkbox_app
+ },
+ "buttons": {
+ "create_asset": btn_create_asset
+ }
+ }
+
+ # signals
+ btn_create_asset.clicked.connect(self.create_asset)
+ assets.selection_changed.connect(self.on_asset_changed)
+ checkbox_outlink.toggled.connect(self.on_outlink_checkbox_change)
+ combo_task_template.currentTextChanged.connect(
+ self.on_task_template_changed
+ )
+ if self.context is not None:
+ checkbox_app.toggled.connect(self.on_app_checkbox_change)
+ # on start
+ self.on_start()
+
+ self.resize(600, 500)
+
+ self.echo("Connected to project: {0}".format(project_name))
+
+ def open_app(self):
+ if self.context == 'maya':
+ Popen("maya")
+ else:
+ message = QtWidgets.QMessageBox(self)
+ message.setWindowTitle("App is not set")
+ message.setIcon(QtWidgets.QMessageBox.Critical)
+ message.show()
+
+ def on_start(self):
+ project_name = io.Session['AVALON_PROJECT']
+ project_query = 'Project where full_name is "{}"'.format(project_name)
+ if self.session is None:
+ session = ftrack_api.Session()
+ self.session = session
+ else:
+ session = self.session
+ ft_project = session.query(project_query).one()
+ schema_name = ft_project['project_schema']['name']
+ # Load config
+ preset_path = pypelib.get_presets_path()
+ schemas_items = [preset_path, 'ftrack', 'project_schemas']
+ schema_dir = os.path.sep.join(schemas_items)
+
+ config_file = 'default.json'
+ for filename in os.listdir(schema_dir):
+ if filename.startswith(schema_name):
+ config_file = filename
+ break
+ config_file = os.path.sep.join([schema_dir, config_file])
+ with open(config_file) as data_file:
+ self.config_data = json.load(data_file)
+
+ # set outlink
+ input_outlink = self.data['inputs']['outlink']
+ checkbox_outlink = self.data['inputs']['outlink_cb']
+ outlink_text = io.Session.get('AVALON_ASSET', '')
+ checkbox_outlink.setChecked(True)
+ if outlink_text == '':
+ outlink_text = '< No context >'
+ checkbox_outlink.setChecked(False)
+ checkbox_outlink.hide()
+ input_outlink.setText(outlink_text)
+
+ # load asset build types
+ self.load_assetbuild_types()
+
+ # Load task templates
+ self.load_task_templates()
+ self.data["model"]["assets"].refresh()
+ self.on_asset_changed()
+
+ def create_asset(self):
+ name_input = self.data['inputs']['name']
+ name = name_input.text()
+ test_name = name.replace(' ', '')
+ error_message = None
+ message = QtWidgets.QMessageBox(self)
+ message.setWindowTitle("Some errors has occured")
+ message.setIcon(QtWidgets.QMessageBox.Critical)
+ # TODO: show error messages on any error
+ if self.valid_parent is not True and test_name == '':
+ error_message = "Name is not set and Parent is not selected"
+ elif self.valid_parent is not True:
+ error_message = "Parent is not selected"
+ elif test_name == '':
+ error_message = "Name is not set"
+
+ if error_message is not None:
+ message.setText(error_message)
+ message.show()
+ return
+
+ test_name_exists = io.find({
+ 'type': 'asset',
+ 'name': name
+ })
+ existing_assets = [x for x in test_name_exists]
+ if len(existing_assets) > 0:
+ message.setText("Entered Asset name is occupied")
+ message.show()
+ return
+
+ checkbox_app = self.data['inputs']['open_app']
+ if checkbox_app is not None and checkbox_app.isChecked() is True:
+ task_view = self.data["view"]["tasks"]
+ task_model = self.data["model"]["tasks"]
+ try:
+ index = task_view.selectedIndexes()[0]
+ task_name = task_model.itemData(index)[0]
+ except Exception:
+ message.setText("Please select task")
+ message.show()
+ return
+
+ # Get ftrack session
+ if self.session is None:
+ session = ftrack_api.Session()
+ self.session = session
+ else:
+ session = self.session
+
+ # Get Ftrack project entity
+ project_name = io.Session['AVALON_PROJECT']
+ project_query = 'Project where full_name is "{}"'.format(project_name)
+ try:
+ ft_project = session.query(project_query).one()
+ except Exception:
+ message.setText("Ftrack project was not found")
+ message.show()
+ return
+
+ # Get Ftrack entity of parent
+ ft_parent = None
+ assets_model = self.data["model"]["assets"]
+ selected = assets_model.get_selected_assets()
+ parent = io.find_one({"_id": selected[0], "type": "asset"})
+ asset_id = parent.get('data', {}).get('ftrackId', None)
+ asset_entity_type = parent.get('data', {}).get('entityType', None)
+ asset_query = '{} where id is "{}"'
+ if asset_id is not None and asset_entity_type is not None:
+ try:
+ ft_parent = session.query(asset_query.format(
+ asset_entity_type, asset_id)
+ ).one()
+ except Exception:
+ ft_parent = None
+
+ if ft_parent is None:
+ ft_parent = self.get_ftrack_asset(parent, ft_project)
+
+ if ft_parent is None:
+ message.setText("Parent's Ftrack entity was not found")
+ message.show()
+ return
+
+ asset_build_combo = self.data['inputs']['assetbuild']
+ asset_type_name = asset_build_combo.currentText()
+ asset_type_query = 'Type where name is "{}"'.format(asset_type_name)
+ try:
+ asset_type = session.query(asset_type_query).one()
+ except Exception:
+ message.setText("Selected Asset Build type does not exists")
+ message.show()
+ return
+
+ for children in ft_parent['children']:
+ if children['name'] == name:
+ message.setText("Entered Asset name is occupied")
+ message.show()
+ return
+
+ task_template_combo = self.data['inputs']['tasktemplate']
+ task_template = task_template_combo.currentText()
+ tasks = []
+ for template in self.config_data['task_templates']:
+ if template['name'] == task_template:
+ tasks = template['task_types']
+ break
+
+ available_task_types = []
+ task_types = ft_project['project_schema']['_task_type_schema']
+ for task_type in task_types['types']:
+ available_task_types.append(task_type['name'])
+
+ not_possible_tasks = []
+ for task in tasks:
+ if task not in available_task_types:
+ not_possible_tasks.append(task)
+
+ if len(not_possible_tasks) != 0:
+ message.setText((
+ "These Task types weren't found"
+ " in Ftrack project schema:\n{}").format(
+ ', '.join(not_possible_tasks))
+ )
+ message.show()
+ return
+
+ # Create asset build
+ asset_build_data = {
+ 'name': name,
+ 'project_id': ft_project['id'],
+ 'parent_id': ft_parent['id'],
+ 'type': asset_type
+ }
+
+ new_entity = session.create('AssetBuild', asset_build_data)
+
+ task_data = {
+ 'project_id': ft_project['id'],
+ 'parent_id': new_entity['id']
+ }
+
+ for task in tasks:
+ type = session.query('Type where name is "{}"'.format(task)).one()
+
+ task_data['type_id'] = type['id']
+ task_data['name'] = task
+ session.create('Task', task_data)
+
+ av_project = io.find_one({'type': 'project'})
+ silo = parent['silo']
+ if silo is None:
+ silo = parent['name']
+
+ hiearchy_items = []
+ hiearchy_items.extend(self.get_avalon_parent(parent))
+ hiearchy_items.append(parent['name'])
+
+ hierarchy = os.path.sep.join(hiearchy_items)
+ new_asset_data = {
+ 'ftrackId': new_entity['id'],
+ 'entityType': new_entity.entity_type,
+ 'visualParent': parent['_id'],
+ 'tasks': tasks,
+ 'parents': hiearchy_items,
+ 'hierarchy': hierarchy
+ }
+ new_asset_info = {
+ 'parent': av_project['_id'],
+ 'name': name,
+ 'schema': pypelib.get_avalon_asset_template_schema(),
+ 'silo': silo,
+ 'type': 'asset',
+ 'data': new_asset_data
+ }
+ try:
+ schema.validate(new_asset_info)
+ except Exception:
+ message.setText((
+ 'Asset information are not valid'
+ ' to create asset in avalon database'
+ ))
+ message.show()
+ session.rollback()
+ return
+ io.insert_one(new_asset_info)
+ session.commit()
+
+ outlink_cb = self.data['inputs']['outlink_cb']
+ if outlink_cb.isChecked() is True:
+ outlink_input = self.data['inputs']['outlink']
+ outlink_name = outlink_input.text()
+ outlink_asset = io.find_one({
+ 'type': 'asset',
+ 'name': outlink_name
+ })
+ outlink_ft_id = outlink_asset.get('data', {}).get('ftrackId', None)
+ outlink_entity_type = outlink_asset.get(
+ 'data', {}
+ ).get('entityType', None)
+ if outlink_ft_id is not None and outlink_entity_type is not None:
+ try:
+ outlink_entity = session.query(asset_query.format()).one()
+ except Exception:
+ outlink_entity = None
+
+ if outlink_entity is None:
+ outlink_entity = self.get_ftrack_asset(
+ outlink_asset, ft_project
+ )
+
+ if outlink_entity is None:
+ message.setText("Outlink's Ftrack entity was not found")
+ message.show()
+ return
+
+ link_data = {
+ 'from_id': new_entity['id'],
+ 'to_id': outlink_entity['id']
+ }
+ session.create('TypedContextLink', link_data)
+ session.commit()
+
+ if checkbox_app is not None and checkbox_app.isChecked() is True:
+ origin_asset = api.Session.get('AVALON_ASSET', None)
+ origin_task = api.Session.get('AVALON_TASK', None)
+ asset_name = name
+ task_view = self.data["view"]["tasks"]
+ task_model = self.data["model"]["tasks"]
+ try:
+ index = task_view.selectedIndexes()[0]
+ except Exception:
+ message.setText("No task is selected. App won't be launched")
+ message.show()
+ return
+ task_name = task_model.itemData(index)[0]
+ try:
+ api.update_current_task(task=task_name, asset=asset_name)
+ self.open_app()
+
+ finally:
+ if origin_task is not None and origin_asset is not None:
+ api.update_current_task(
+ task=origin_task, asset=origin_asset
+ )
+
+ message.setWindowTitle("Asset Created")
+ message.setText("Asset Created successfully")
+ message.setIcon(QtWidgets.QMessageBox.Information)
+ message.show()
+
+ def get_ftrack_asset(self, asset, ft_project):
+ parenthood = []
+ parenthood.extend(self.get_avalon_parent(asset))
+ parenthood.append(asset['name'])
+ parenthood = list(reversed(parenthood))
+ output_entity = None
+ ft_entity = ft_project
+ index = len(parenthood) - 1
+ while True:
+ name = parenthood[index]
+ found = False
+ for children in ft_entity['children']:
+ if children['name'] == name:
+ ft_entity = children
+ found = True
+ break
+ if found is False:
+ return None
+ if index == 0:
+ output_entity = ft_entity
+ break
+ index -= 1
+
+ return output_entity
+
+ def get_avalon_parent(self, entity):
+ parent_id = entity['data']['visualParent']
+ parents = []
+ if parent_id is not None:
+ parent = io.find_one({'_id': parent_id})
+ parents.extend(self.get_avalon_parent(parent))
+ parents.append(parent['name'])
+ return parents
+
+ def echo(self, message):
+ widget = self.data["label"]["message"]
+ widget.setText(str(message))
+
+ QtCore.QTimer.singleShot(5000, lambda: widget.setText(""))
+
+ print(message)
+
+ def load_task_templates(self):
+ templates = self.config_data.get('task_templates', [])
+ all_names = []
+ for template in templates:
+ all_names.append(template['name'])
+
+ tt_combobox = self.data['inputs']['tasktemplate']
+ tt_combobox.clear()
+ tt_combobox.addItems(all_names)
+
+ def load_assetbuild_types(self):
+ types = []
+ schemas = self.config_data.get('schemas', [])
+ for _schema in schemas:
+ if _schema['object_type'] == 'Asset Build':
+ types = _schema['task_types']
+ break
+ ab_combobox = self.data['inputs']['assetbuild']
+ ab_combobox.clear()
+ ab_combobox.addItems(types)
+
+ def on_app_checkbox_change(self):
+ task_model = self.data['model']['tasks']
+ app_checkbox = self.data['inputs']['open_app']
+ if app_checkbox.isChecked() is True:
+ task_model.selectable = True
+ else:
+ task_model.selectable = False
+
+ def on_outlink_checkbox_change(self):
+ checkbox_outlink = self.data['inputs']['outlink_cb']
+ outlink_input = self.data['inputs']['outlink']
+ if checkbox_outlink.isChecked() is True:
+ outlink_text = io.Session['AVALON_ASSET']
+ else:
+ outlink_text = '< Outlinks won\'t be set >'
+
+ outlink_input.setText(outlink_text)
+
+ def on_task_template_changed(self):
+ combobox = self.data['inputs']['tasktemplate']
+ task_model = self.data['model']['tasks']
+ name = combobox.currentText()
+ tasks = []
+ for template in self.config_data['task_templates']:
+ if template['name'] == name:
+ tasks = template['task_types']
+ break
+ task_model.set_tasks(tasks)
+
+ def on_asset_changed(self):
+ """Callback on asset selection changed
+
+ This updates the task view.
+
+ """
+ assets_model = self.data["model"]["assets"]
+ parent_input = self.data['inputs']['parent']
+ selected = assets_model.get_selected_assets()
+ if len(selected) > 1:
+ self.valid_parent = False
+ parent_input.setText('< Please select only one asset! >')
+ elif len(selected) == 1:
+ self.valid_parent = True
+ asset = io.find_one({"_id": selected[0], "type": "asset"})
+ parent_input.setText(asset['name'])
+ else:
+ self.valid_parent = False
+ parent_input.setText('< Nothing is selected >')
+
+
+def show(parent=None, debug=False, context=None):
+ """Display Loader GUI
+
+ Arguments:
+ debug (bool, optional): Run loader in debug-mode,
+ defaults to False
+
+ """
+
+ try:
+ module.window.close()
+ del module.window
+ except (RuntimeError, AttributeError):
+ pass
+
+ if debug is True:
+ io.install()
+
+ with parentlib.application():
+ window = Window(parent, context)
+ window.setStyleSheet(style.load_stylesheet())
+ window.show()
+
+ module.window = window
+
+
+def cli(args):
+ import argparse
+ parser = argparse.ArgumentParser()
+ parser.add_argument("project")
+ parser.add_argument("asset")
+
+ args = parser.parse_args(args)
+ project = args.project
+ asset = args.asset
+ io.install()
+
+ api.Session["AVALON_PROJECT"] = project
+ if asset != '':
+ api.Session["AVALON_ASSET"] = asset
+
+ show()
diff --git a/pype/tools/assetcreator/model.py b/pype/tools/assetcreator/model.py
new file mode 100644
index 0000000000..0f74b1140c
--- /dev/null
+++ b/pype/tools/assetcreator/model.py
@@ -0,0 +1,315 @@
+import re
+import logging
+import collections
+
+from avalon.vendor.Qt import QtCore, QtWidgets
+from avalon.vendor import qtawesome as awesome
+from avalon import io
+from avalon import style
+
+log = logging.getLogger(__name__)
+
+
+class Node(dict):
+ """A node that can be represented in a tree view.
+
+ The node can store data just like a dictionary.
+
+ >>> data = {"name": "John", "score": 10}
+ >>> node = Node(data)
+ >>> assert node["name"] == "John"
+
+ """
+
+ def __init__(self, data=None):
+ super(Node, self).__init__()
+
+ self._children = list()
+ self._parent = None
+
+ if data is not None:
+ assert isinstance(data, dict)
+ self.update(data)
+
+ def childCount(self):
+ return len(self._children)
+
+ def child(self, row):
+
+ if row >= len(self._children):
+ log.warning("Invalid row as child: {0}".format(row))
+ return
+
+ return self._children[row]
+
+ def children(self):
+ return self._children
+
+ def parent(self):
+ return self._parent
+
+ def row(self):
+ """
+ Returns:
+ int: Index of this node under parent"""
+ if self._parent is not None:
+ siblings = self.parent().children()
+ return siblings.index(self)
+
+ def add_child(self, child):
+ """Add a child to this node"""
+ child._parent = self
+ self._children.append(child)
+
+
+class TreeModel(QtCore.QAbstractItemModel):
+
+ COLUMNS = list()
+ NodeRole = QtCore.Qt.UserRole + 1
+
+ def __init__(self, parent=None):
+ super(TreeModel, self).__init__(parent)
+ self._root_node = Node()
+
+ def rowCount(self, parent):
+ if parent.isValid():
+ node = parent.internalPointer()
+ else:
+ node = self._root_node
+
+ return node.childCount()
+
+ def columnCount(self, parent):
+ return len(self.COLUMNS)
+
+ def data(self, index, role):
+
+ if not index.isValid():
+ return None
+
+ if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
+
+ node = index.internalPointer()
+ column = index.column()
+
+ key = self.COLUMNS[column]
+ return node.get(key, None)
+
+ if role == self.NodeRole:
+ return index.internalPointer()
+
+ def setData(self, index, value, role=QtCore.Qt.EditRole):
+ """Change the data on the nodes.
+
+ Returns:
+ bool: Whether the edit was successful
+ """
+
+ if index.isValid():
+ if role == QtCore.Qt.EditRole:
+
+ node = index.internalPointer()
+ column = index.column()
+ key = self.COLUMNS[column]
+ node[key] = value
+
+ # passing `list()` for PyQt5 (see PYSIDE-462)
+ self.dataChanged.emit(index, index, list())
+
+ # must return true if successful
+ return True
+
+ return False
+
+ def setColumns(self, keys):
+ assert isinstance(keys, (list, tuple))
+ self.COLUMNS = keys
+
+ def headerData(self, section, orientation, role):
+
+ if role == QtCore.Qt.DisplayRole:
+ if section < len(self.COLUMNS):
+ return self.COLUMNS[section]
+
+ super(TreeModel, self).headerData(section, orientation, role)
+
+ def flags(self, index):
+ return (
+ QtCore.Qt.ItemIsEnabled |
+ QtCore.Qt.ItemIsSelectable
+ )
+
+ def parent(self, index):
+
+ node = index.internalPointer()
+ parent_node = node.parent()
+
+ # If it has no parents we return invalid
+ if parent_node == self._root_node or not parent_node:
+ return QtCore.QModelIndex()
+
+ return self.createIndex(parent_node.row(), 0, parent_node)
+
+ def index(self, row, column, parent):
+ """Return index for row/column under parent"""
+
+ if not parent.isValid():
+ parentNode = self._root_node
+ else:
+ parentNode = parent.internalPointer()
+
+ childItem = parentNode.child(row)
+ if childItem:
+ return self.createIndex(row, column, childItem)
+ else:
+ return QtCore.QModelIndex()
+
+ def add_child(self, node, parent=None):
+ if parent is None:
+ parent = self._root_node
+
+ parent.add_child(node)
+
+ def column_name(self, column):
+ """Return column key by index"""
+
+ if column < len(self.COLUMNS):
+ return self.COLUMNS[column]
+
+ def clear(self):
+ self.beginResetModel()
+ self._root_node = Node()
+ self.endResetModel()
+
+
+class TasksTemplateModel(TreeModel):
+ """A model listing the tasks combined for a list of assets"""
+
+ COLUMNS = ["Tasks"]
+
+ def __init__(self):
+ super(TasksTemplateModel, self).__init__()
+ self.selectable = False
+ self._icons = {
+ "__default__": awesome.icon("fa.folder-o",
+ color=style.colors.default)
+ }
+
+ def set_tasks(self, tasks):
+ """Set assets to track by their database id
+
+ Arguments:
+ asset_ids (list): List of asset ids.
+
+ """
+
+ self.clear()
+
+ # let cleared task view if no tasks are available
+ if len(tasks) == 0:
+ return
+
+ self.beginResetModel()
+
+ icon = self._icons["__default__"]
+ for task in tasks:
+ node = Node({
+ "Tasks": task,
+ "icon": icon
+ })
+
+ self.add_child(node)
+
+ self.endResetModel()
+
+ def flags(self, index):
+ if self.selectable is False:
+ return QtCore.Qt.ItemIsEnabled
+ else:
+ return (
+ QtCore.Qt.ItemIsEnabled |
+ QtCore.Qt.ItemIsSelectable
+ )
+
+ def data(self, index, role):
+
+ if not index.isValid():
+ return
+
+ # Add icon to the first column
+ if role == QtCore.Qt.DecorationRole:
+ if index.column() == 0:
+ return index.internalPointer()['icon']
+
+ return super(TasksTemplateModel, self).data(index, role)
+
+
+class DeselectableTreeView(QtWidgets.QTreeView):
+ """A tree view that deselects on clicking on an empty area in the view"""
+
+ def mousePressEvent(self, event):
+
+ index = self.indexAt(event.pos())
+ if not index.isValid():
+ # clear the selection
+ self.clearSelection()
+ # clear the current index
+ self.setCurrentIndex(QtCore.QModelIndex())
+
+ QtWidgets.QTreeView.mousePressEvent(self, event)
+
+
+class ExactMatchesFilterProxyModel(QtCore.QSortFilterProxyModel):
+ """Filter model to where key column's value is in the filtered tags"""
+
+ def __init__(self, *args, **kwargs):
+ super(ExactMatchesFilterProxyModel, self).__init__(*args, **kwargs)
+ self._filters = set()
+
+ def setFilters(self, filters):
+ self._filters = set(filters)
+
+ def filterAcceptsRow(self, source_row, source_parent):
+
+ # No filter
+ if not self._filters:
+ return True
+
+ else:
+ model = self.sourceModel()
+ column = self.filterKeyColumn()
+ idx = model.index(source_row, column, source_parent)
+ data = model.data(idx, self.filterRole())
+ if data in self._filters:
+ return True
+ else:
+ return False
+
+
+class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
+ """Filters to the regex if any of the children matches allow parent"""
+ def filterAcceptsRow(self, row, parent):
+
+ regex = self.filterRegExp()
+ if not regex.isEmpty():
+ pattern = regex.pattern()
+ model = self.sourceModel()
+ source_index = model.index(row, self.filterKeyColumn(), parent)
+ if source_index.isValid():
+
+ # Check current index itself
+ key = model.data(source_index, self.filterRole())
+ if re.search(pattern, key, re.IGNORECASE):
+ return True
+
+ # Check children
+ rows = model.rowCount(source_index)
+ for i in range(rows):
+ if self.filterAcceptsRow(i, source_index):
+ return True
+
+ # Otherwise filter it
+ return False
+
+ return super(RecursiveSortFilterProxyModel,
+ self).filterAcceptsRow(row, parent)
diff --git a/pype/tools/assetcreator/widget.py b/pype/tools/assetcreator/widget.py
new file mode 100644
index 0000000000..c6fa10697f
--- /dev/null
+++ b/pype/tools/assetcreator/widget.py
@@ -0,0 +1,394 @@
+import logging
+import contextlib
+
+from avalon.vendor import qtawesome as awesome
+from avalon.vendor.Qt import QtWidgets, QtCore, QtGui
+from avalon import io
+from avalon import style
+
+from .model import (
+ TreeModel,
+ Node,
+ RecursiveSortFilterProxyModel,
+ DeselectableTreeView
+)
+
+log = logging.getLogger(__name__)
+
+
def _iter_model_rows(model,
                     column,
                     include_root=False):
    """Yield every row index of ``model`` in breadth-first order.

    Traversal starts at the invisible root index; pass
    ``include_root=True`` to also yield that root index.
    """
    queue = [QtCore.QModelIndex()]  # start the traversal at the root

    while queue:
        index = queue.pop(0)

        # Enqueue this index's children so they are visited later.
        for child_row in range(model.rowCount(index)):
            queue.append(model.index(child_row, column, index))

        if index.isValid() or include_root:
            yield index
+
+
@contextlib.contextmanager
def preserve_expanded_rows(tree_view,
                           column=0,
                           role=QtCore.Qt.DisplayRole):
    """Preserve expanded rows in a QTreeView by a column's data role.

    Maintains the expand/collapse status of the model items: when a
    refresh is triggered inside the ``with`` block, items that were
    expanded stay expanded and vice versa.

    Arguments:
        tree_view (QWidgets.QTreeView): the tree view which is
            nested in the application
        column (int): the column to retrieve the data from
        role (int): the role which dictates what will be returned

    Returns:
        None

    """

    model = tree_view.model()

    # Remember the role data of every currently expanded row.
    expanded = set()
    for index in _iter_model_rows(model,
                                  column=column,
                                  include_root=False):
        if tree_view.isExpanded(index):
            expanded.add(index.data(role))

    try:
        yield
    finally:
        # Nothing was expanded before: leave the view untouched.
        if expanded:
            # Re-apply the state by matching each row's data against the
            # remembered values.
            for index in _iter_model_rows(model,
                                          column=column,
                                          include_root=False):
                if index.data(role) in expanded:
                    tree_view.expand(index)
                else:
                    tree_view.collapse(index)
+
+
@contextlib.contextmanager
def preserve_selection(tree_view,
                       column=0,
                       role=QtCore.Qt.DisplayRole,
                       current_index=True):
    """Preserves row selection in QTreeView by column's data role.

    This function is created to maintain the selection status of the
    model items: when a refresh is triggered inside the ``with`` block,
    rows whose data for `role` matches the previously selected rows are
    re-selected afterwards.

    Arguments:
        tree_view (QWidgets.QTreeView): the tree view nested in the
            application
        column (int): the column to retrieve the data from
        role (int): the role which dictates what will be returned
        current_index (bool): whether to also restore the view's
            current index by matching its data for `role`

    Returns:
        None

    """

    model = tree_view.model()
    selection_model = tree_view.selectionModel()
    # Select whole rows, adding to (not replacing) the selection.
    flags = selection_model.Select | selection_model.Rows

    if current_index:
        current_index_value = tree_view.currentIndex().data(role)
    else:
        current_index_value = None

    selected_rows = selection_model.selectedRows()
    if not selected_rows:
        # Nothing selected beforehand, so there is nothing to restore.
        # NOTE(review): the current index is not restored in this case
        # either, even when `current_index` is True — confirm intended.
        yield
        return

    # Remember the role data of every selected row.
    selected = set(row.data(role) for row in selected_rows)
    try:
        yield
    finally:
        if not selected:
            return

        # Go through all indices, select the ones with similar data
        for index in _iter_model_rows(model,
                                      column=column,
                                      include_root=False):

            value = index.data(role)
            state = value in selected
            if state:
                tree_view.scrollTo(index)  # Ensure item is visible
                selection_model.select(index, flags)

            if current_index_value and value == current_index_value:
                tree_view.setCurrentIndex(index)
+
+
class AssetModel(TreeModel):
    """A model listing assets in the silo in the active project.

    The assets are displayed in a treeview, they are visually parented by
    a `visualParent` field in the database containing an `_id` to a parent
    asset.

    """

    COLUMNS = ["label"]
    Name = 0
    Deprecated = 2
    ObjectId = 3

    DocumentRole = QtCore.Qt.UserRole + 2
    ObjectIdRole = QtCore.Qt.UserRole + 3

    def __init__(self, parent=None):
        super(AssetModel, self).__init__(parent=parent)
        self.refresh()

    def _add_hierarchy(self, parent=None):
        """Add assets under `parent` to the model, recursing into children.

        Args:
            parent (Node, optional): parent node; None queries the root
                assets (those without a ``data.visualParent``).

        """
        # Find the assets under the parent
        find_data = {
            "type": "asset"
        }
        if parent is None:
            # Root assets either lack the field or have it set to None.
            find_data['$or'] = [
                {'data.visualParent': {'$exists': False}},
                {'data.visualParent': None}
            ]
        else:
            find_data["data.visualParent"] = parent['_id']

        assets = io.find(find_data).sort('name', 1)
        for asset in assets:
            # get label from data, otherwise use name
            data = asset.get("data", {})
            label = data.get("label", asset['name'])
            tags = data.get("tags", [])

            # store for the asset for optimization
            deprecated = "deprecated" in tags

            node = Node({
                "_id": asset['_id'],
                "name": asset["name"],
                "label": label,
                "type": asset['type'],
                "tags": ", ".join(tags),
                "deprecated": deprecated,
                "_document": asset
            })
            self.add_child(node, parent=parent)

            # Add asset's children recursively
            self._add_hierarchy(node)

    def refresh(self):
        """Refresh the data for the model."""

        self.clear()
        self.beginResetModel()
        self._add_hierarchy(parent=None)
        self.endResetModel()

    def flags(self, index):
        # Items are selectable but never editable/checkable.
        return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

    def data(self, index, role):

        if not index.isValid():
            return

        node = index.internalPointer()
        if role == QtCore.Qt.DecorationRole:  # icon

            column = index.column()
            if column == self.Name:

                # Allow a custom icon and custom icon color to be defined.
                # Use .get() because documents are not guaranteed to carry
                # a "data" entry (see _add_hierarchy); a plain subscript
                # raised KeyError for such documents.
                data = node["_document"].get("data", {})
                icon = data.get("icon", None)
                color = data.get("color", style.colors.default)

                if icon is None:
                    # Use default icons if no custom one is specified.
                    # If it has children show a full folder, otherwise
                    # show an open folder
                    has_children = self.rowCount(index) > 0
                    icon = "folder" if has_children else "folder-o"

                # Make the color darker when the asset is deprecated
                if node.get("deprecated", False):
                    color = QtGui.QColor(color).darker(250)

                try:
                    key = "fa.{0}".format(icon)  # font-awesome key
                    icon = awesome.icon(key, color=color)
                    return icon
                except Exception as exception:
                    # Log an error message instead of erroring out completely
                    # when the icon couldn't be created (e.g. invalid name)
                    log.error(exception)

            return

        if role == QtCore.Qt.ForegroundRole:  # font color
            # Use the precomputed boolean instead of a substring test on the
            # joined tags string, which also matched tags that merely
            # contained "deprecated" (e.g. "not-deprecated").
            if node.get("deprecated", False):
                return QtGui.QColor(style.colors.light).darker(250)

        if role == self.ObjectIdRole:
            return node.get("_id", None)

        if role == self.DocumentRole:
            return node.get("_document", None)

        return super(AssetModel, self).data(index, role)
+
+
class AssetView(DeselectableTreeView):
    """Asset item view.

    This implements a context menu.

    """

    def __init__(self):
        super(AssetView, self).__init__()
        # Hide the header, keep the indent compact and allow callers to
        # attach a custom context menu via the customContextMenuRequested
        # signal.
        self.setHeaderHidden(True)
        self.setIndentation(15)
        self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+
+
class AssetWidget(QtWidgets.QWidget):
    """A Widget to display a tree of assets with filter

    To list the assets of the active project:
    >>> # widget = AssetWidget()
    >>> # widget.refresh()
    >>> # widget.show()

    """

    silo_changed = QtCore.Signal(str)    # on silo combobox change
    assets_refreshed = QtCore.Signal()   # on model refresh
    selection_changed = QtCore.Signal()  # on view selection change
    current_changed = QtCore.Signal()    # on view current index change

    def __init__(self, parent=None):
        super(AssetWidget, self).__init__(parent=parent)
        self.setContentsMargins(0, 0, 0, 0)

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(4)

        # Tree View
        model = AssetModel()
        proxy = RecursiveSortFilterProxyModel()
        proxy.setSourceModel(model)
        proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        view = AssetView()
        view.setModel(proxy)

        # Header
        header = QtWidgets.QHBoxLayout()

        icon = awesome.icon("fa.refresh", color=style.colors.light)
        refresh = QtWidgets.QPushButton(icon, "")
        refresh.setToolTip("Refresh items")

        # Renamed local from `filter` to avoid shadowing the builtin.
        filter_field = QtWidgets.QLineEdit()
        filter_field.textChanged.connect(proxy.setFilterFixedString)
        filter_field.setPlaceholderText("Filter assets..")

        header.addWidget(filter_field)
        header.addWidget(refresh)

        # Layout
        layout.addLayout(header)
        layout.addWidget(view)

        # Signals/Slots
        selection = view.selectionModel()
        selection.selectionChanged.connect(self.selection_changed)
        selection.currentChanged.connect(self.current_changed)
        refresh.clicked.connect(self.refresh)

        self.refreshButton = refresh
        self.model = model
        self.proxy = proxy
        self.view = view

    def _refresh_model(self):
        """Rebuild the source model and notify listeners."""
        self.model.refresh()
        self.assets_refreshed.emit()

    def refresh(self):
        """Refresh the asset tree."""
        self._refresh_model()

    def get_active_asset(self):
        """Return the asset id of the current asset."""
        current = self.view.currentIndex()
        return current.data(self.model.ObjectIdRole)

    def get_active_index(self):
        """Return the view's current index."""
        return self.view.currentIndex()

    def get_selected_assets(self):
        """Return the assets' ids that are selected."""
        selection = self.view.selectionModel()
        rows = selection.selectedRows()
        return [row.data(self.model.ObjectIdRole) for row in rows]

    def select_assets(self, assets, expand=True):
        """Select assets by name.

        Args:
            assets (list): List of asset names
            expand (bool): Whether to also expand to the asset in the view

        Raises:
            TypeError: If `assets` is not a list or tuple.

        Returns:
            None

        """
        # TODO: Instead of individual selection optimize for many assets

        if not isinstance(assets, (tuple, list)):
            # Raise instead of assert: asserts are stripped under `-O`.
            raise TypeError("Assets must be list or tuple")

        # Clear selection
        selection_model = self.view.selectionModel()
        selection_model.clearSelection()

        # Select
        mode = selection_model.Select | selection_model.Rows
        for index in _iter_model_rows(self.proxy,
                                      column=0,
                                      include_root=False):
            data = index.data(self.model.NodeRole)
            name = data['name']
            if name in assets:
                selection_model.select(index, mode)

                if expand:
                    self.view.expand(index)

                # Set the currently active index
                self.view.setCurrentIndex(index)
diff --git a/res/icons/Thumbs.db b/res/icons/Thumbs.db
new file mode 100644
index 0000000000..fa56c871f6
Binary files /dev/null and b/res/icons/Thumbs.db differ
diff --git a/res/icons/lookmanager.png b/res/icons/lookmanager.png
new file mode 100644
index 0000000000..9ed1d3db8e
Binary files /dev/null and b/res/icons/lookmanager.png differ
diff --git a/setup/houdini/MainMenuCommon.XML b/setup/houdini/MainMenuCommon.XML
new file mode 100644
index 0000000000..16e92be688
--- /dev/null
+++ b/setup/houdini/MainMenuCommon.XML
@@ -0,0 +1,79 @@
+
+
+
+
+
+
diff --git a/setup/houdini/scripts/123.py b/setup/houdini/scripts/123.py
new file mode 100644
index 0000000000..499a0e2931
--- /dev/null
+++ b/setup/houdini/scripts/123.py
@@ -0,0 +1,10 @@
+from avalon import pipeline, houdini
+import hou
+
+
def main():
    """Install the Avalon pipeline into the Houdini host integration."""
    print("Installing Avalon ...")
    pipeline.install(houdini)


# NOTE(review): this module is a Houdini startup script (scripts/123.py),
# which Houdini executes on launch, so the unguarded call is intentional.
# The `hou` import above appears unused here — presumably kept for the
# startup environment; verify before removing.
main()