Merge branch 'develop' of bitbucket.org:pypeclub/pype-config into develop

Milan Kolar 2019-04-08 17:32:21 +02:00
commit 3f71fb0f53
31 changed files with 1459 additions and 47 deletions


@ -0,0 +1,9 @@
from .clockify_api import ClockifyAPI
from .widget_settings import ClockifySettings
from .clockify import ClockifyModule
__all__ = [
'ClockifyAPI',
'ClockifySettings',
'ClockifyModule'
]

pype/clockify/clockify.py (Normal file, 97 lines)

@ -0,0 +1,97 @@
import threading
from app import style
from app.vendor.Qt import QtWidgets
from pype.clockify import ClockifySettings, ClockifyAPI
class ClockifyModule:
def __init__(self, main_parent=None, parent=None):
self.main_parent = main_parent
self.parent = parent
self.clockapi = ClockifyAPI()
self.widget_settings = ClockifySettings(main_parent, self)
self.widget_settings_required = None
self.thread_timer_check = None
# Bools
self.bool_thread_check_running = False
self.bool_api_key_set = False
self.bool_workspace_set = False
self.bool_timer_run = False
def start_up(self):
self.clockapi.set_master(self)
self.bool_api_key_set = self.clockapi.set_api()
if self.bool_api_key_set is False:
self.show_settings()
return
self.bool_workspace_set = self.clockapi.workspace_id is not None
if self.bool_workspace_set is False:
return
self.start_timer_check()
self.set_menu_visibility()
def start_timer_check(self):
self.bool_thread_check_running = True
if self.thread_timer_check is None:
self.thread_timer_check = threading.Thread(
target=self.check_running
)
self.thread_timer_check.daemon = True
self.thread_timer_check.start()
def stop_timer_check(self):
self.bool_thread_check_running = False  # signal check_running() to exit before join()
if self.thread_timer_check is not None:
self.thread_timer_check.join()
self.thread_timer_check = None
def check_running(self):
import time
while self.bool_thread_check_running is True:
if self.clockapi.get_in_progress() is not None:
self.bool_timer_run = True
else:
self.bool_timer_run = False
self.set_menu_visibility()
time.sleep(5)
def stop_timer(self):
self.clockapi.finish_time_entry()
self.bool_timer_run = False
# Definition of Tray menu
def tray_menu(self, parent):
# Menu for Tray App
self.menu = QtWidgets.QMenu('Clockify', parent)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())
# Actions
self.aShowSettings = QtWidgets.QAction(
"Settings", self.menu
)
self.aStopTimer = QtWidgets.QAction(
"Stop timer", self.menu
)
self.menu.addAction(self.aShowSettings)
self.menu.addAction(self.aStopTimer)
self.aShowSettings.triggered.connect(self.show_settings)
self.aStopTimer.triggered.connect(self.stop_timer)
self.set_menu_visibility()
return self.menu
def show_settings(self):
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key() or '')
self.widget_settings.show()
def set_menu_visibility(self):
self.aStopTimer.setVisible(self.bool_timer_run)
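For orientation, a minimal sketch (not part of this commit) of how ClockifyModule could be wired into a Qt tray application. The QApplication/QSystemTrayIcon plumbing and the icon-less tray are illustrative assumptions, and a configured pype environment (PYPE_SETUP_ROOT, CLOCKIFY_WORKSPACE) is presumed:

import sys
from app.vendor.Qt import QtWidgets
from pype.clockify import ClockifyModule

app = QtWidgets.QApplication(sys.argv)
module = ClockifyModule()
menu = module.tray_menu(None)   # build the Clockify submenu (creates the actions)
module.start_up()               # validate API key/workspace, start the check thread

tray = QtWidgets.QSystemTrayIcon()
tray.setContextMenu(menu)
tray.show()
sys.exit(app.exec_())

Note the ordering: tray_menu() must run before start_up(), because set_menu_visibility() toggles the "Stop timer" action that tray_menu() creates.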


@ -0,0 +1,434 @@
import os
import requests
import json
import datetime
import appdirs
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(
Singleton, cls
).__call__(*args, **kwargs)
return cls._instances[cls]
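The metaclass above means every construction of a class that uses it returns the same cached object; a tiny illustration of the behaviour this gives ClockifyAPI:

from pype.clockify import ClockifyAPI

a = ClockifyAPI()
b = ClockifyAPI()
assert a is b  # both names point at the single cached instance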
class ClockifyAPI(metaclass=Singleton):
endpoint = "https://api.clockify.me/api/"
headers = {"X-Api-Key": None}
app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
file_name = 'clockify.json'
fpath = os.path.join(app_dir, file_name)
master_parent = None
workspace_id = None
def set_master(self, master_parent):
self.master_parent = master_parent
def verify_api(self):
for key, value in self.headers.items():
if value is None or value.strip() == '':
return False
return True
def set_api(self, api_key=None):
if api_key is None:
api_key = self.get_api_key()
if api_key is not None and self.validate_api_key(api_key) is True:
self.headers["X-Api-Key"] = api_key
self.set_workspace()
return True
return False
def validate_api_key(self, api_key):
test_headers = {'X-Api-Key': api_key}
action_url = 'workspaces/'
response = requests.get(
self.endpoint + action_url,
headers=test_headers
)
if response.status_code != 200:
return False
return True
def validate_workspace_perm(self):
test_project = '__test__'
action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
body = {
"name": test_project, "clientId": "", "isPublic": "false",
"estimate": {"type": "AUTO"},
"color": "#f44336", "billable": "true"
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers, json=body
)
if response.status_code == 201:
self.delete_project(self.get_project_id(test_project))
return True
else:
projects = self.get_projects()
if test_project in projects:
try:
self.delete_project(self.get_project_id(test_project))
return True
except json.decoder.JSONDecodeError:
return False
return False
def set_workspace(self, name=None):
if name is None:
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
self.workspace = name
self.workspace_id = None
if self.workspace is None:
return
try:
result = self.validate_workspace()
except Exception:
result = False
if result is not False:
self.workspace_id = result
if self.master_parent is not None:
self.master_parent.start_timer_check()
return True
return False
def validate_workspace(self, name=None):
if name is None:
name = self.workspace
all_workspaces = self.get_workspaces()
if name in all_workspaces:
return all_workspaces[name]
return False
def get_api_key(self):
api_key = None
try:
file = open(self.fpath, 'r')
api_key = json.load(file).get('api_key', None)
if api_key == '':
api_key = None
except Exception:
file = open(self.fpath, 'w')
file.close()
return api_key
def save_api_key(self, api_key):
data = {'api_key': api_key}
file = open(self.fpath, 'w')
file.write(json.dumps(data))
file.close()
def get_workspaces(self):
action_url = 'workspaces/'
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
return {
workspace["name"]: workspace["id"] for workspace in response.json()
}
def get_projects(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/'.format(workspace_id)
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
return {
project["name"]: project["id"] for project in response.json()
}
def get_tags(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/tags/'.format(workspace_id)
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
return {
tag["name"]: tag["id"] for tag in response.json()
}
def get_tasks(self, project_id, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}/tasks/'.format(
workspace_id, project_id
)
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
return {
task["name"]: task["id"] for task in response.json()
}
def get_workspace_id(self, workspace_name):
all_workspaces = self.get_workspaces()
if workspace_name not in all_workspaces:
return None
return all_workspaces[workspace_name]
def get_project_id(self, project_name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
all_projects = self.get_projects(workspace_id)
if project_name not in all_projects:
return None
return all_projects[project_name]
def get_tag_id(self, tag_name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
all_tasks = self.get_tags(workspace_id)
if tag_name not in all_tasks:
return None
return all_tasks[tag_name]
def get_task_id(
self, task_name, project_id, workspace_id=None
):
if workspace_id is None:
workspace_id = self.workspace_id
all_tasks = self.get_tasks(
project_id, workspace_id
)
if task_name not in all_tasks:
return None
return all_tasks[task_name]
def get_current_time(self):
return str(datetime.datetime.utcnow().isoformat())+'Z'
def start_time_entry(
self, description, project_id, task_id=None, tag_ids=[],
workspace_id=None, billable=True
):
# Workspace
if workspace_id is None:
workspace_id = self.workspace_id
# Check whether a time entry with the same values is already running
current = self.get_in_progress(workspace_id)
if current is not None:
if (
current.get("description", None) == description and
current.get("projectId", None) == project_id and
current.get("taskId", None) == task_id
):
self.bool_timer_run = True
return self.bool_timer_run
self.finish_time_entry(workspace_id)
# Convert billable to strings
if billable:
billable = 'true'
else:
billable = 'false'
# Rest API Action
action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
start = self.get_current_time()
body = {
"start": start,
"billable": billable,
"description": description,
"projectId": project_id,
"taskId": task_id,
"tagIds": tag_ids
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers,
json=body
)
success = False
if response.status_code < 300:
success = True
return success
def get_in_progress(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/inProgress'.format(
workspace_id
)
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
try:
output = response.json()
except json.decoder.JSONDecodeError:
output = None
return output
def finish_time_entry(self, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
current = self.get_in_progress(workspace_id)
if current is None:
    # Nothing is running - there is no entry to finish
    return
current_id = current["id"]
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, current_id
)
body = {
"start": current["timeInterval"]["start"],
"billable": current["billable"],
"description": current["description"],
"projectId": current["projectId"],
"taskId": current["taskId"],
"tagIds": current["tagIds"],
"end": self.get_current_time()
}
response = requests.put(
self.endpoint + action_url,
headers=self.headers,
json=body
)
return response.json()
def get_time_entries(
self, workspace_id=None, quantity=10
):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/'.format(workspace_id)
response = requests.get(
self.endpoint + action_url,
headers=self.headers
)
return response.json()[:quantity]
def remove_time_entry(self, tid, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/timeEntries/{}'.format(
workspace_id, tid
)
response = requests.delete(
self.endpoint + action_url,
headers=self.headers
)
return response.json()
def add_project(self, name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/'.format(workspace_id)
body = {
"name": name,
"clientId": "",
"isPublic": "false",
"estimate": {
# "estimate": "3600",
"type": "AUTO"
},
"color": "#f44336",
"billable": "true"
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers,
json=body
)
return response.json()
def add_workspace(self, name):
action_url = 'workspaces/'
body = {"name": name}
response = requests.post(
self.endpoint + action_url,
headers=self.headers,
json=body
)
return response.json()
def add_task(
self, name, project_id, workspace_id=None
):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}/tasks/'.format(
workspace_id, project_id
)
body = {
"name": name,
"projectId": project_id
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers,
json=body
)
return response.json()
def add_tag(self, name, workspace_id=None):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/tags'.format(workspace_id)
body = {
"name": name
}
response = requests.post(
self.endpoint + action_url,
headers=self.headers,
json=body
)
return response.json()
def delete_project(
self, project_id, workspace_id=None
):
if workspace_id is None:
workspace_id = self.workspace_id
action_url = 'workspaces/{}/projects/{}'.format(
workspace_id, project_id
)
response = requests.delete(
self.endpoint + action_url,
headers=self.headers,
)
return response.json()
def convert_input(
self, entity_id, entity_name, mode='Workspace', project_id=None
):
if entity_id is None:
error = False
error_msg = 'Missing information "{}"'
if mode.lower() == 'workspace':
if entity_id is None and entity_name is None:
if self.workspace_id is not None:
entity_id = self.workspace_id
else:
error = True
else:
entity_id = self.get_workspace_id(entity_name)
else:
if entity_id is None and entity_name is None:
error = True
elif mode.lower() == 'project':
entity_id = self.get_project_id(entity_name)
elif mode.lower() == 'task':
entity_id = self.get_task_id(
task_name=entity_name, project_id=project_id
)
else:
raise TypeError('Unknown type')
# Raise error
if error:
raise ValueError(error_msg.format(mode))
return entity_id
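A hedged end-to-end sketch of driving the API class above; the key, workspace and project names are placeholders, and every call issues a real request against api.clockify.me:

from pype.clockify import ClockifyAPI

clockapi = ClockifyAPI()
if clockapi.set_api('XX1XxXX2x3x4xXxx'):        # validates and stores the key
    clockapi.set_workspace('my-workspace')      # resolve and remember the workspace id
    project_id = clockapi.get_project_id('MyProject')
    clockapi.start_time_entry('shot010/animation', project_id)
    # ... do some work ...
    clockapi.finish_time_entry()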


@ -0,0 +1,155 @@
import os
from app.vendor.Qt import QtCore, QtGui, QtWidgets
from app import style
class ClockifySettings(QtWidgets.QWidget):
SIZE_W = 300
SIZE_H = 130
loginSignal = QtCore.Signal(object, object, object)
def __init__(self, main_parent=None, parent=None, optional=True):
super(ClockifySettings, self).__init__()
self.parent = parent
self.main_parent = main_parent
self.clockapi = parent.clockapi
self.optional = optional
self.validated = False
# Icon
if hasattr(parent, 'icon'):
self.setWindowIcon(self.parent.icon)
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(self.parent.parent.icon)
else:
pype_setup = os.getenv('PYPE_SETUP_ROOT')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)
self.setWindowIcon(icon)
self.setWindowFlags(
QtCore.Qt.WindowCloseButtonHint |
QtCore.Qt.WindowMinimizeButtonHint
)
self._translate = QtCore.QCoreApplication.translate
# Font
self.font = QtGui.QFont()
self.font.setFamily("DejaVu Sans Condensed")
self.font.setPointSize(9)
self.font.setBold(True)
self.font.setWeight(50)
self.font.setKerning(True)
# Size setting
self.resize(self.SIZE_W, self.SIZE_H)
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
self.setStyleSheet(style.load_stylesheet())
self.setLayout(self._main())
self.setWindowTitle('Clockify settings')
def _main(self):
self.main = QtWidgets.QVBoxLayout()
self.main.setObjectName("main")
self.form = QtWidgets.QFormLayout()
self.form.setContentsMargins(10, 15, 10, 5)
self.form.setObjectName("form")
self.label_api_key = QtWidgets.QLabel("Clockify API key:")
self.label_api_key.setFont(self.font)
self.label_api_key.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.label_api_key.setTextFormat(QtCore.Qt.RichText)
self.label_api_key.setObjectName("label_api_key")
self.input_api_key = QtWidgets.QLineEdit()
self.input_api_key.setEnabled(True)
self.input_api_key.setFrame(True)
self.input_api_key.setObjectName("input_api_key")
self.input_api_key.setPlaceholderText(
self._translate("main", "e.g. XX1XxXX2x3x4xXxx")
)
self.error_label = QtWidgets.QLabel("")
self.error_label.setFont(self.font)
self.error_label.setTextFormat(QtCore.Qt.RichText)
self.error_label.setObjectName("error_label")
self.error_label.setWordWrap(True)
self.error_label.hide()
self.form.addRow(self.label_api_key, self.input_api_key)
self.form.addRow(self.error_label)
self.btn_group = QtWidgets.QHBoxLayout()
self.btn_group.addStretch(1)
self.btn_group.setObjectName("btn_group")
self.btn_ok = QtWidgets.QPushButton("Ok")
self.btn_ok.setToolTip('Sets the Clockify API key so the timer can be started/stopped')
self.btn_ok.clicked.connect(self.click_ok)
self.btn_cancel = QtWidgets.QPushButton("Cancel")
cancel_tooltip = 'Application won\'t start'
if self.optional:
cancel_tooltip = 'Close this window'
self.btn_cancel.setToolTip(cancel_tooltip)
self.btn_cancel.clicked.connect(self._close_widget)
self.btn_group.addWidget(self.btn_ok)
self.btn_group.addWidget(self.btn_cancel)
self.main.addLayout(self.form)
self.main.addLayout(self.btn_group)
return self.main
def setError(self, msg):
self.error_label.setText(msg)
self.error_label.show()
def invalid_input(self, entity):
entity.setStyleSheet("border: 1px solid red;")
def click_ok(self):
api_key = self.input_api_key.text().strip()
if self.optional is True and api_key == '':
self.clockapi.save_api_key(None)
self.clockapi.set_api(api_key)
self.validated = False
self._close_widget()
return
validation = self.clockapi.validate_api_key(api_key)
if validation:
self.clockapi.save_api_key(api_key)
self.clockapi.set_api(api_key)
self.validated = True
self._close_widget()
else:
self.invalid_input(self.input_api_key)
self.validated = False
self.setError(
"The entered API key is invalid"
)
def closeEvent(self, event):
if self.optional is True:
event.ignore()
self._close_widget()
else:
self.validated = False
def _close_widget(self):
if self.optional is True:
self.hide()
else:
self.close()


@ -0,0 +1,105 @@
import sys
import argparse
import logging
import ftrack_api
from pype.ftrack import BaseAction
from pype.clockify import ClockifyAPI
class StartClockify(BaseAction):
'''Starts a timer in Clockify.'''
#: Action identifier.
identifier = 'clockify.start.timer'
#: Action label.
label = 'Start timer'
#: Action description.
description = 'Starts a timer in Clockify'
#: Action icon.
icon = 'https://clockify.me/assets/images/clockify-logo.png'
#: Clockify api
clockapi = ClockifyAPI()
def discover(self, session, entities, event):
if len(entities) != 1:
return False
if entities[0].entity_type.lower() != 'task':
return False
if self.clockapi.workspace_id is None:
return False
return True
def launch(self, session, entities, event):
task = entities[0]
task_name = task['type']['name']
project_name = task['project']['full_name']
def get_parents(entity):
output = []
if entity.entity_type.lower() == 'project':
return output
output.extend(get_parents(entity['parent']))
output.append(entity['name'])
return output
desc_items = get_parents(task['parent'])
desc_items.append(task['name'])
description = '/'.join(desc_items)
project_id = self.clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(self.clockapi.get_tag_id(task_name))
self.clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
)
return True
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
StartClockify(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))


@ -0,0 +1,148 @@
import sys
import argparse
import logging
import json
import ftrack_api
from pype.ftrack import BaseAction, MissingPermision
from pype.clockify import ClockifyAPI
class SyncClocify(BaseAction):
'''Synchronise project names and task types.'''
#: Action identifier.
identifier = 'clockify.sync'
#: Action label.
label = 'Sync To Clockify'
#: Action description.
description = 'Synchronise data to Clockify workspace'
#: priority
priority = 100
#: roles that are allowed to register this action
role_list = ['Pypeclub', 'Administrator']
#: icon
icon = 'https://clockify.me/assets/images/clockify-logo-white.svg'
#: CLockifyApi
clockapi = ClockifyAPI()
def register(self):
if self.clockapi.workspace_id is None:
raise ValueError('Clockify workspace or API key is not set!')
if self.clockapi.validate_workspace_perm() is False:
raise MissingPermision('Clockify')
super().register()
def discover(self, session, entities, event):
''' Validation '''
return True
def launch(self, session, entities, event):
# JOB SETTINGS
userId = event['source']['user']['id']
user = session.query('User where id is ' + userId).one()
job = session.create('Job', {
'user': user,
'status': 'running',
'data': json.dumps({
'description': 'Sync Ftrack to Clockify'
})
})
session.commit()
try:
entity = entities[0]
if entity.entity_type.lower() == 'project':
project = entity
else:
project = entity['project']
project_name = project['full_name']
task_types = []
for task_type in project['project_schema']['_task_type_schema'][
'types'
]:
task_types.append(task_type['name'])
clockify_projects = self.clockapi.get_projects()
if project_name not in clockify_projects:
response = self.clockapi.add_project(project_name)
if 'id' not in response:
self.log.error('Project {} can\'t be created'.format(
project_name
))
return {
'success': False,
'message': 'Can\'t create project, unexpected error'
}
project_id = response['id']
else:
project_id = clockify_projects[project_name]
clockify_workspace_tags = self.clockapi.get_tags()
for task_type in task_types:
if task_type not in clockify_workspace_tags:
response = self.clockapi.add_tag(task_type)
if 'id' not in response:
self.log.error('Tag {} can\'t be created'.format(
task_type
))
continue
except Exception:
job['status'] = 'failed'
session.commit()
return False
job['status'] = 'done'
session.commit()
return True
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):
return
SyncClocify(session).register()
def main(arguments=None):
'''Set up logging and register action.'''
if arguments is None:
arguments = []
parser = argparse.ArgumentParser()
# Allow setting of logging level from arguments.
loggingLevels = {}
for level in (
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
logging.ERROR, logging.CRITICAL
):
loggingLevels[logging.getLevelName(level).lower()] = level
parser.add_argument(
'-v', '--verbosity',
help='Set the logging output verbosity.',
choices=loggingLevels.keys(),
default='info'
)
namespace = parser.parse_args(arguments)
# Set up basic logging
logging.basicConfig(level=loggingLevels[namespace.verbosity])
session = ftrack_api.Session()
register(session)
# Wait for events
logging.info(
'Registered actions and listening for events. Use Ctrl-C to abort.'
)
session.event_hub.wait()
if __name__ == '__main__':
raise SystemExit(main(sys.argv[1:]))


@ -23,8 +23,12 @@ class ThumbnailEvents(BaseEvent):
parent['name'], task['name']))
# Update task thumbnail from published version
if (entity['entityType'] == 'assetversion' and
entity['action'] == 'encoded'):
# if (entity['entityType'] == 'assetversion' and
# entity['action'] == 'encoded'):
if (
entity['entityType'] == 'assetversion'
and 'thumbid' in entity['keys']
):
version = session.get('AssetVersion', entity['entityId'])
thumbnail = version.get('thumbnail')
@ -40,6 +44,7 @@ class ThumbnailEvents(BaseEvent):
pass
def register(session, **kw):
'''Register plugin. Called when used as a plugin.'''
if not isinstance(session, ftrack_api.session.Session):


@ -146,6 +146,25 @@ class AppAction(BaseHandler):
entity = entities[0]
project_name = entity['project']['full_name']
# Validate Clockify settings if Clockify is required
clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
if clockify_timer is not None:
from pype.clockify import ClockifyAPI
clockapi = ClockifyAPI()
if clockapi.verify_api() is False:
title = 'Launch message'
header = '# You can\'t launch **any application**'
message = (
'<p>You don\'t have a Clockify API'
' key set in the Clockify settings</p>'
)
items = [
{'type': 'label', 'value': header},
{'type': 'label', 'value': message}
]
self.show_interface(event, items, title)
return False
database = pypelib.get_avalon_database()
# Get current environments
@ -293,6 +312,31 @@ class AppAction(BaseHandler):
self.log.info('Starting timer for task: ' + task['name'])
user.start_timer(task, force=True)
# RUN TIMER IN Clockify
if clockify_timer is not None:
task_type = task['type']['name']
project_name = task['project']['full_name']
def get_parents(entity):
output = []
if entity.entity_type.lower() == 'project':
return output
output.extend(get_parents(entity['parent']))
output.append(entity['name'])
return output
desc_items = get_parents(task['parent'])
desc_items.append(task['name'])
description = '/'.join(desc_items)
project_id = clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(clockapi.get_tag_id(task_type))
clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
)
# Change status of task to In progress
config = get_config_data()


@ -5,8 +5,10 @@ from pype import api as pype
class MissingPermision(Exception):
def __init__(self):
super().__init__('Missing permission')
def __init__(self, message=None):
if message is None:
message = 'Ftrack'
super().__init__(message)
class BaseHandler(object):
@ -64,10 +66,10 @@ class BaseHandler(object):
self.log.info((
'{} "{}" - Registered successfully ({:.4f}sec)'
).format(self.type, label, run_time))
except MissingPermision:
except MissingPermision as MPE:
self.log.info((
'!{} "{}" - You\'re missing required permissions'
).format(self.type, label))
'!{} "{}" - You\'re missing required {} permissions'
).format(self.type, label, str(MPE)))
except AssertionError as ae:
self.log.info((
'!{} "{}" - {}'
@ -308,13 +310,15 @@ class BaseHandler(object):
}
elif isinstance(result, dict):
for key in ('success', 'message'):
if key in result:
continue
items = 'items' in result
if items is False:
for key in ('success', 'message'):
if key in result:
continue
raise KeyError(
'Missing required key: {0}.'.format(key)
)
raise KeyError(
'Missing required key: {0}.'.format(key)
)
else:
self.log.error(


@ -8,14 +8,10 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Collect Assumed Destination"
order = pyblish.api.CollectorOrder + 0.499
order = pyblish.api.CollectorOrder + 0.498
exclude_families = ["clip"]
def process(self, instance):
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
"""Create a destination filepath based on the current data available
Example template:
@ -27,6 +23,9 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
Returns:
file path (str)
"""
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
# get all the stuff from the database
subset_name = instance.data["subset"]


@ -16,9 +16,10 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
"""
label = "Extract Jpeg EXR"
hosts = ["shell"]
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
host = ["shell"]
def process(self, instance):
start = instance.data.get("startFrame")


@ -18,7 +18,7 @@ class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
label = "Extract Quicktime EXR"
order = pyblish.api.ExtractorOrder
families = ["imagesequence", "render", "write", "source"]
host = ["shell"]
hosts = ["shell"]
def process(self, instance):
fps = instance.data.get("fps")


@ -168,6 +168,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
representations = []
destination_list = []
if 'transfers' not in instance.data:
instance.data['transfers'] = []
for files in instance.data["files"]:
# Collection
# _______
@ -240,7 +243,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
template_data["frame"] = "#####"
template_data["frame"] = "#" * anatomy.render.padding
anatomy_filled = anatomy.format(template_data)
path_to_save = anatomy_filled.render.path
template = anatomy.render.fullpath
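For context on the frame-padding change above: the placeholder now follows the project's anatomy render padding instead of a fixed five digits; a tiny illustration (4 is an assumed padding value):

padding = 4                       # e.g. anatomy.render.padding
frame_placeholder = "#" * padding
print(frame_placeholder)          # -> '####'  (the old code always produced '#####')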


@ -7,7 +7,7 @@ from pype.tools import assetcreator
from pype.api import Logger
log = Logger.getLogger(__name__, "aport")
log = Logger.getLogger(__name__, "asset_creator")
class AssetCreator(api.Action):


@ -0,0 +1,42 @@
from avalon import api, io
from pype.clockify import ClockifyAPI
from pype.api import Logger
log = Logger.getLogger(__name__, "clockify_start")
class ClockifyStart(api.Action):
name = "clockify_start_timer"
label = "Clockify - Start Timer"
icon = "clockify_icon"
order = 500
clockapi = ClockifyAPI()
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
if "AVALON_TASK" in session:
return True
return False
def process(self, session, **kwargs):
project_name = session['AVALON_PROJECT']
asset_name = session['AVALON_ASSET']
task_name = session['AVALON_TASK']
description = asset_name
asset = io.find_one({
'type': 'asset',
'name': asset_name
})
if asset is not None:
desc_items = asset.get('data', {}).get('parents', [])
desc_items.append(asset_name)
desc_items.append(task_name)
description = '/'.join(desc_items)
project_id = self.clockapi.get_project_id(project_name)
tag_ids = []
tag_ids.append(self.clockapi.get_tag_id(task_name))
self.clockapi.start_time_entry(
description, project_id, tag_ids=tag_ids
)


@ -0,0 +1,57 @@
from avalon import api, io
from pype.clockify import ClockifyAPI
from pype.api import Logger
log = Logger.getLogger(__name__, "clockify_sync")
class ClockifySync(api.Action):
name = "sync_to_clockify"
label = "Sync to Clockify"
icon = "clockify_white_icon"
order = 500
clockapi = ClockifyAPI()
have_permissions = clockapi.validate_workspace_perm()
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
return self.have_permissions
def process(self, session, **kwargs):
project_name = session.get('AVALON_PROJECT', None)
projects_to_sync = []
if project_name is None or project_name.strip() == '':
for project in io.projects():
projects_to_sync.append(project)
else:
project = io.find_one({'type': 'project'})
projects_to_sync.append(project)
projects_info = {}
for project in projects_to_sync:
task_types = [task['name'] for task in project['config']['tasks']]
projects_info[project['name']] = task_types
clockify_projects = self.clockapi.get_projects()
for project_name, task_types in projects_info.items():
if project_name not in clockify_projects:
response = self.clockapi.add_project(project_name)
if 'id' not in response:
self.log.error('Project {} can\'t be created'.format(
project_name
))
continue
project_id = response['id']
else:
project_id = clockify_projects[project_name]
clockify_workspace_tags = self.clockapi.get_tags()
for task_type in task_types:
if task_type not in clockify_workspace_tags:
response = self.clockapi.add_tag(task_type)
if 'id' not in response:
self.log.error('Tag {} can\'t be created'.format(
task_type
))
continue


@ -1,4 +1,6 @@
import pype.maya.plugin
import os
import json
class AbcLoader(pype.maya.plugin.ReferenceLoader):
@ -16,6 +18,12 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "animation"
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
@ -25,6 +33,23 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader):
reference=True,
returnNewNodes=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes
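The preset-file/outliner-colour block above is duplicated in every loader touched by this commit; a possible shared helper (hypothetical, not part of the commit, and the function name is mine) that would express the same logic once:

import json
import os

from maya import cmds


def set_outliner_color(node, family):
    """Tint `node` in the Outliner with the colour preset for `family`, if any."""
    preset_file = os.path.join(
        os.environ.get('PYPE_STUDIO_TEMPLATES', ''),
        'presets', 'tools', 'family_colors.json'
    )
    try:
        with open(preset_file, 'r') as cfile:
            colors = json.load(cfile)
    except (OSError, ValueError):
        # Missing or unreadable preset file - leave the default colour
        return
    c = colors.get(family)
    if c is not None:
        cmds.setAttr(node + ".useOutlinerColor", 1)
        cmds.setAttr(node + ".outlinerColor", c[0], c[1], c[2])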


@ -2,6 +2,7 @@ from avalon import api
import pype.maya.plugin
import os
import pymel.core as pm
import json
class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
@ -21,6 +22,11 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
from avalon import maya
import pymel.core as pm
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "ass"
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
@ -34,7 +40,8 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName=groupName)
cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
# Set attributes
proxyShape = pm.ls(nodes, type="mesh")[0]
@ -43,6 +50,19 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
proxyShape.dso.set(path)
proxyShape.aiOverrideShaders.set(0)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
@ -132,7 +152,6 @@ class AssStandinLoader(api.Loader):
import mtoa.ui.arnoldmenu
import pymel.core as pm
asset = context['asset']['name']
namespace = namespace or lib.unique_namespace(
asset + "_",
@ -146,6 +165,20 @@ class AssStandinLoader(api.Loader):
label = "{}:{}".format(namespace, name)
root = pm.group(name=label, empty=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get('ass')
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create transform with shape
transform_name = label + "_ASS"
# transform = pm.createNode("transform", name=transform_name,


@ -1,4 +1,6 @@
import pype.maya.plugin
import os
import json
class CameraLoader(pype.maya.plugin.ReferenceLoader):
@ -16,7 +18,13 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
# Get family type from the context
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "camera"
cmds.loadPlugin("AbcImport.mll", quiet=True)
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
@ -27,6 +35,20 @@ class CameraLoader(pype.maya.plugin.ReferenceLoader):
cameras = cmds.ls(nodes, type="camera")
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
# Check the Maya version, lockTransform has been introduced since
# Maya 2016.5 Ext 2
version = int(cmds.about(version=True))


@ -1,4 +1,6 @@
import pype.maya.plugin
import os
import json
class FBXLoader(pype.maya.plugin.ReferenceLoader):
@ -17,6 +19,11 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "fbx"
# Ensure FBX plug-in is loaded
cmds.loadPlugin("fbxmaya", quiet=True)
@ -28,6 +35,21 @@ class FBXLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName="{}:{}".format(namespace, name))
groupName = "{}:{}".format(namespace, name)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes


@ -1,4 +1,6 @@
import pype.maya.plugin
import json
import os
class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
@ -19,6 +21,11 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "model"
with maya.maintained_selection():
nodes = cmds.file(self.fname,
namespace=namespace,
@ -28,6 +35,20 @@ class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
groupName="{}:{}".format(namespace, name))
self[:] = nodes
groupName = "{}:{}".format(namespace, name)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
return nodes


@ -1,5 +1,7 @@
from avalon import api
import pype.maya.plugin
import json
import os
class ModelLoader(pype.maya.plugin.ReferenceLoader):
@ -19,13 +21,36 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
from avalon import maya
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "model"
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
with maya.maintained_selection():
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name))
groupName=groupName)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
@ -64,6 +89,19 @@ class GpuCacheLoader(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get('model')
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create transform with shape
transform_name = label + "_GPU"
@ -125,6 +163,7 @@ class GpuCacheLoader(api.Loader):
except RuntimeError:
pass
class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
"""Specific loader of Alembic for the studio.animation family"""
@ -141,15 +180,36 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader):
import maya.cmds as cmds
groupName = "{}:{}".format(namespace, name)
cmds.loadPlugin("AbcImport.mll", quiet=True)
nodes = cmds.file(self.fname,
namespace=namespace,
sharedReferenceFile=False,
groupReference=True,
groupName="{}:{}".format(namespace, name),
groupName=groupName,
reference=True,
returnNewNodes=True)
namespace = cmds.referenceQuery(nodes[0], namespace=True)
groupName = "{}:{}".format(namespace, name)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get('model')
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
return nodes


@ -2,6 +2,8 @@ from maya import cmds
import pype.maya.plugin
from avalon import api, maya
import os
import json
class RigLoader(pype.maya.plugin.ReferenceLoader):
@ -21,12 +23,35 @@ class RigLoader(pype.maya.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, data):
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "rig"
groupName = "{}:{}".format(namespace, name)
nodes = cmds.file(self.fname,
namespace=namespace,
reference=True,
returnNewNodes=True,
groupReference=True,
groupName="{}:{}".format(namespace, name))
groupName=groupName)
cmds.makeIdentity(groupName, apply=False, rotate=True,
translate=True, scale=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
# Store for post-process
self[:] = nodes


@ -1,4 +1,6 @@
from avalon import api
import os
import json
class LoadVDBtoRedShift(api.Loader):
@ -17,6 +19,11 @@ class LoadVDBtoRedShift(api.Loader):
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "vdbcache"
# Check if the plugin for redshift is available on the pc
try:
cmds.loadPlugin("redshift4maya", quiet=True)
@ -48,6 +55,19 @@ class LoadVDBtoRedShift(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create VR
volume_node = cmds.createNode("RedshiftVolumeShape",


@ -1,4 +1,6 @@
from avalon import api
import json
import os
class LoadVDBtoVRay(api.Loader):
@ -16,6 +18,11 @@ class LoadVDBtoVRay(api.Loader):
import avalon.maya.lib as lib
from avalon.maya.pipeline import containerise
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "vdbcache"
# Check if viewport drawing engine is Open GL Core (compat)
render_engine = None
compatible = "OpenGLCoreProfileCompat"
@ -40,6 +47,19 @@ class LoadVDBtoVRay(api.Loader):
# Root group
label = "{}:{}".format(namespace, name)
root = cmds.group(name=label, empty=True)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(root + ".useOutlinerColor", 1)
cmds.setAttr(root + ".outlinerColor",
c[0], c[1], c[2])
# Create VR
grid_node = cmds.createNode("VRayVolumeGrid",


@ -1,6 +1,7 @@
from avalon.maya import lib
from avalon import api
import json
import os
import maya.cmds as cmds
@ -20,6 +21,19 @@ class VRayProxyLoader(api.Loader):
from avalon.maya.pipeline import containerise
from pype.maya.lib import namespaced
try:
family = context["representation"]["context"]["family"]
except ValueError:
family = "vrayproxy"
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
asset_name = context['asset']["name"]
namespace = namespace or lib.unique_namespace(
asset_name + "_",
@ -40,6 +54,12 @@ class VRayProxyLoader(api.Loader):
if not nodes:
return
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}_{1}.useOutlinerColor".format(name, "GRP"), 1)
cmds.setAttr("{0}_{1}.outlinerColor".format(name, "GRP"),
c[0], c[1], c[2])
return containerise(
name=name,
namespace=namespace,
@ -101,7 +121,7 @@ class VRayProxyLoader(api.Loader):
# Create nodes
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
name="{}_VRMM".format(name))
vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
empty=True,


@ -23,6 +23,11 @@ class YetiCacheLoader(api.Loader):
def load(self, context, name=None, namespace=None, data=None):
try:
family = context["representation"]["context"]["family"]
except (KeyError, ValueError):
family = "yeticache"
# Build namespace
asset = context["asset"]
if namespace is None:
@ -49,6 +54,19 @@ class YetiCacheLoader(api.Loader):
group_name = "{}:{}".format(namespace, name)
group_node = cmds.group(nodes, name=group_name)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get(family)
if c is not None:
cmds.setAttr(group_name + ".useOutlinerColor", 1)
cmds.setAttr(group_name + ".outlinerColor",
c[0], c[1], c[2])
nodes.append(group_node)


@ -1,4 +1,6 @@
import pype.maya.plugin
import os
import json
class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
@ -24,6 +26,20 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
groupReference=True,
groupName="{}:{}".format(namespace, name))
groupName = "{}:{}".format(namespace, name)
preset_file = os.path.join(
os.environ.get('PYPE_STUDIO_TEMPLATES'),
'presets', 'tools',
'family_colors.json'
)
with open(preset_file, 'r') as cfile:
colors = json.load(cfile)
c = colors.get('yetiRig')
if c is not None:
cmds.setAttr(groupName + ".useOutlinerColor", 1)
cmds.setAttr(groupName + ".outlinerColor",
c[0], c[1], c[2])
self[:] = nodes
self.log.info("Yeti Rig Connection Manager will be available soon")


@ -47,6 +47,8 @@ def get_look_attrs(node):
for attr in attrs:
if attr in SHAPE_ATTRS:
result.append(attr)
elif attr.startswith('ai'):
result.append(attr)
return result
@ -387,6 +389,8 @@ class CollectLook(pyblish.api.InstancePlugin):
# Collect changes to "custom" attributes
node_attrs = get_look_attrs(node)
self.log.info(node_attrs)
# Only include if there are any properties we care about
if not node_attrs:
continue


@ -101,7 +101,7 @@ class LoadSequence(api.Loader):
if namespace is None:
namespace = context['asset']['name']
file = self.fname
file = self.fname.replace("\\", "/")
log.info("file: {}\n".format(self.fname))
read_name = "Read_" + context["representation"]["context"]["subset"]
@ -112,7 +112,7 @@ class LoadSequence(api.Loader):
r = nuke.createNode(
"Read",
"name {}".format(read_name))
r["file"].setValue(self.fname)
r["file"].setValue(file)
# Set colorspace defined in version data
colorspace = context["version"]["data"].get("colorspace", None)


@ -1,5 +1,6 @@
import os
import re
import sys
from avalon import io
from avalon import api as avalon
from . import lib
@ -7,12 +8,14 @@ from app.api import (Templates, Logger, format)
log = Logger.getLogger(__name__,
os.getenv("AVALON_APP", "pype-config"))
SESSION = None
self = sys.modules[__name__]
self.SESSION = None
def set_session():
lib.set_io_database()
SESSION = avalon.session
self.SESSION = avalon.session
def load_data_from_templates():
@ -104,9 +107,9 @@ def set_project_code(code):
os.environ[KEY]: project code
avalon.session[KEY]: project code
"""
if SESSION is None:
if self.SESSION is None:
set_session()
SESSION["AVALON_PROJECTCODE"] = code
self.SESSION["AVALON_PROJECTCODE"] = code
os.environ["AVALON_PROJECTCODE"] = code
@ -118,9 +121,9 @@ def get_project_name():
string: project name
"""
if SESSION is None:
if self.SESSION is None:
set_session()
project_name = SESSION.get("AVALON_PROJECT", None) \
project_name = self.SESSION.get("AVALON_PROJECT", None) \
or os.getenv("AVALON_PROJECT", None)
assert project_name, log.error("missing `AVALON_PROJECT`"
"in avalon session "
@ -138,9 +141,9 @@ def get_asset():
Raises:
log: error
"""
if SESSION is None:
if self.SESSION is None:
set_session()
asset = SESSION.get("AVALON_ASSET", None) \
asset = self.SESSION.get("AVALON_ASSET", None) \
or os.getenv("AVALON_ASSET", None)
log.info("asset: {}".format(asset))
assert asset, log.error("missing `AVALON_ASSET`"
@ -159,9 +162,9 @@ def get_task():
Raises:
log: error
"""
if SESSION is None:
if self.SESSION is None:
set_session()
task = SESSION.get("AVALON_TASK", None) \
task = self.SESSION.get("AVALON_TASK", None) \
or os.getenv("AVALON_TASK", None)
assert task, log.error("missing `AVALON_TASK`"
"in avalon session "
@ -196,9 +199,9 @@ def set_hierarchy(hierarchy):
Args:
hierarchy (string): hierarchy path ("silo/folder/seq")
"""
if SESSION is None:
if self.SESSION is None:
set_session()
SESSION["AVALON_HIERARCHY"] = hierarchy
self.SESSION["AVALON_HIERARCHY"] = hierarchy
os.environ["AVALON_HIERARCHY"] = hierarchy
@ -248,10 +251,10 @@ def set_avalon_workdir(project=None,
avalon.session[AVALON_WORKDIR]: workdir path
"""
if SESSION is None:
if self.SESSION is None:
set_session()
awd = SESSION.get("AVALON_WORKDIR", None) \
or os.getenv("AVALON_WORKDIR", None)
awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None)
data = get_context_data(project, hierarchy, asset, task)
if (not awd) or ("{" not in awd):
@ -259,7 +262,7 @@ def set_avalon_workdir(project=None,
awd_filled = os.path.normpath(format(awd, data))
SESSION["AVALON_WORKDIR"] = awd_filled
self.SESSION["AVALON_WORKDIR"] = awd_filled
os.environ["AVALON_WORKDIR"] = awd_filled
log.info("`AVALON_WORKDIR` fixed to: {}".format(awd_filled))
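On the self = sys.modules[__name__] change running through the diff above: a bare SESSION = avalon.session inside set_session() only rebinds a function-local name, so the module-level SESSION stayed None; a standalone illustration of the difference (unrelated to avalon):

import sys

FLAG = None
self = sys.modules[__name__]


def set_flag_local():
    FLAG = 'set'        # rebinds a local name; the module attribute is untouched


def set_flag_module():
    self.FLAG = 'set'   # assigns the module attribute, visible to every reader


set_flag_local()
print(FLAG)             # -> None
set_flag_module()
print(FLAG)             # -> set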