Merged in release/2.2.0 (pull request #294)
Release/2.2.0 Approved-by: Milan Kolar <milan@orbi.tools>
|
|
@ -23,6 +23,7 @@ from .lib import (
|
|||
get_asset,
|
||||
get_project,
|
||||
get_hierarchy,
|
||||
get_subsets,
|
||||
get_version_from_path,
|
||||
modified_environ,
|
||||
add_tool_to_environment
|
||||
|
|
@ -53,6 +54,7 @@ __all__ = [
|
|||
"get_project",
|
||||
"get_hierarchy",
|
||||
"get_asset",
|
||||
"get_subsets",
|
||||
"get_version_from_path",
|
||||
"modified_environ",
|
||||
"add_tool_to_environment",
|
||||
|
|
|
|||
|
|
@ -1,9 +1,14 @@
|
|||
from .clockify_api import ClockifyAPI
|
||||
from .widget_settings import ClockifySettings
|
||||
from .widget_message import MessageWidget
|
||||
from .clockify import ClockifyModule
|
||||
|
||||
__all__ = [
|
||||
'ClockifyAPI',
|
||||
'ClockifySettings',
|
||||
'ClockifyModule'
|
||||
"ClockifyAPI",
|
||||
"ClockifySettings",
|
||||
"ClockifyModule",
|
||||
"MessageWidget"
|
||||
]
|
||||
|
||||
def tray_init(tray_widget, main_widget):
|
||||
return ClockifyModule(main_widget, tray_widget)
|
||||
|
|
|
|||
|
|
@ -1,15 +1,19 @@
|
|||
import os
|
||||
import threading
|
||||
from pypeapp import style
|
||||
from pypeapp import style, Logger
|
||||
from Qt import QtWidgets
|
||||
from pype.clockify import ClockifySettings, ClockifyAPI
|
||||
from . import ClockifySettings, ClockifyAPI, MessageWidget
|
||||
|
||||
|
||||
class ClockifyModule:
|
||||
|
||||
def __init__(self, main_parent=None, parent=None):
|
||||
self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
|
||||
|
||||
self.main_parent = main_parent
|
||||
self.parent = parent
|
||||
self.clockapi = ClockifyAPI()
|
||||
self.message_widget = None
|
||||
self.widget_settings = ClockifySettings(main_parent, self)
|
||||
self.widget_settings_required = None
|
||||
|
||||
|
|
@ -20,9 +24,10 @@ class ClockifyModule:
|
|||
self.bool_workspace_set = False
|
||||
self.bool_timer_run = False
|
||||
|
||||
def start_up(self):
|
||||
self.clockapi.set_master(self)
|
||||
self.bool_api_key_set = self.clockapi.set_api()
|
||||
|
||||
def tray_start(self):
|
||||
if self.bool_api_key_set is False:
|
||||
self.show_settings()
|
||||
return
|
||||
|
|
@ -41,7 +46,7 @@ class ClockifyModule:
|
|||
os.path.dirname(__file__),
|
||||
'ftrack_actions'
|
||||
])
|
||||
current = os.environ('FTRACK_ACTIONS_PATH', '')
|
||||
current = os.environ.get('FTRACK_ACTIONS_PATH', '')
|
||||
if current:
|
||||
current += os.pathsep
|
||||
os.environ['FTRACK_ACTIONS_PATH'] = current + actions_path
|
||||
|
|
@ -57,6 +62,25 @@ class ClockifyModule:
|
|||
current += os.pathsep
|
||||
os.environ['AVALON_ACTIONS'] = current + actions_path
|
||||
|
||||
if 'TimersManager' in modules:
|
||||
self.timer_manager = modules['TimersManager']
|
||||
self.timer_manager.add_module(self)
|
||||
|
||||
def start_timer_manager(self, data):
|
||||
self.start_timer(data)
|
||||
|
||||
def stop_timer_manager(self):
|
||||
self.stop_timer()
|
||||
|
||||
def timer_started(self, data):
|
||||
if hasattr(self, 'timer_manager'):
|
||||
self.timer_manager.start_timers(data)
|
||||
|
||||
def timer_stopped(self):
|
||||
self.bool_timer_run = False
|
||||
if hasattr(self, 'timer_manager'):
|
||||
self.timer_manager.stop_timers()
|
||||
|
||||
def start_timer_check(self):
|
||||
self.bool_thread_check_running = True
|
||||
if self.thread_timer_check is None:
|
||||
|
|
@ -75,21 +99,129 @@ class ClockifyModule:
|
|||
def check_running(self):
|
||||
import time
|
||||
while self.bool_thread_check_running is True:
|
||||
bool_timer_run = False
|
||||
if self.clockapi.get_in_progress() is not None:
|
||||
self.bool_timer_run = True
|
||||
else:
|
||||
self.bool_timer_run = False
|
||||
self.set_menu_visibility()
|
||||
bool_timer_run = True
|
||||
|
||||
if self.bool_timer_run != bool_timer_run:
|
||||
if self.bool_timer_run is True:
|
||||
self.timer_stopped()
|
||||
elif self.bool_timer_run is False:
|
||||
actual_timer = self.clockapi.get_in_progress()
|
||||
if not actual_timer:
|
||||
continue
|
||||
|
||||
actual_proj_id = actual_timer["projectId"]
|
||||
if not actual_proj_id:
|
||||
continue
|
||||
|
||||
project = self.clockapi.get_project_by_id(actual_proj_id)
|
||||
if project and project.get("code") == 501:
|
||||
continue
|
||||
|
||||
project_name = project["name"]
|
||||
|
||||
actual_timer_hierarchy = actual_timer["description"]
|
||||
hierarchy_items = actual_timer_hierarchy.split("/")
|
||||
# Each pype timer must have at least 2 items!
|
||||
if len(hierarchy_items) < 2:
|
||||
continue
|
||||
task_name = hierarchy_items[-1]
|
||||
hierarchy = hierarchy_items[:-1]
|
||||
|
||||
task_type = None
|
||||
if len(actual_timer.get("tags", [])) > 0:
|
||||
task_type = actual_timer["tags"][0].get("name")
|
||||
data = {
|
||||
"task_name": task_name,
|
||||
"hierarchy": hierarchy,
|
||||
"project_name": project_name,
|
||||
"task_type": task_type
|
||||
}
|
||||
|
||||
self.timer_started(data)
|
||||
|
||||
self.bool_timer_run = bool_timer_run
|
||||
self.set_menu_visibility()
|
||||
time.sleep(5)
|
||||
|
||||
def stop_timer(self):
|
||||
self.clockapi.finish_time_entry()
|
||||
self.bool_timer_run = False
|
||||
if self.bool_timer_run:
|
||||
self.timer_stopped()
|
||||
|
||||
def signed_in(self):
|
||||
if hasattr(self, 'timer_manager'):
|
||||
if not self.timer_manager:
|
||||
return
|
||||
|
||||
if not self.timer_manager.last_task:
|
||||
return
|
||||
|
||||
if self.timer_manager.is_running:
|
||||
self.start_timer_manager(self.timer_manager.last_task)
|
||||
|
||||
def start_timer(self, input_data):
|
||||
# If not api key is not entered then skip
|
||||
if not self.clockapi.get_api_key():
|
||||
return
|
||||
|
||||
actual_timer = self.clockapi.get_in_progress()
|
||||
actual_timer_hierarchy = None
|
||||
actual_project_id = None
|
||||
if actual_timer is not None:
|
||||
actual_timer_hierarchy = actual_timer.get("description")
|
||||
actual_project_id = actual_timer.get("projectId")
|
||||
|
||||
# Concatenate hierarchy and task to get description
|
||||
desc_items = [val for val in input_data.get("hierarchy", [])]
|
||||
desc_items.append(input_data["task_name"])
|
||||
description = "/".join(desc_items)
|
||||
|
||||
# Check project existence
|
||||
project_name = input_data["project_name"]
|
||||
project_id = self.clockapi.get_project_id(project_name)
|
||||
if not project_id:
|
||||
self.log.warning((
|
||||
"Project \"{}\" was not found in Clockify. Timer won't start."
|
||||
).format(project_name))
|
||||
|
||||
msg = (
|
||||
"Project <b>\"{}\"</b> is not in Clockify Workspace <b>\"{}\"</b>."
|
||||
"<br><br>Please inform your Project Manager."
|
||||
).format(project_name, str(self.clockapi.workspace))
|
||||
|
||||
self.message_widget = MessageWidget(
|
||||
self.main_parent, msg, "Clockify - Info Message"
|
||||
)
|
||||
self.message_widget.closed.connect(self.message_widget)
|
||||
self.message_widget.show()
|
||||
|
||||
return
|
||||
|
||||
if (
|
||||
actual_timer is not None and
|
||||
description == actual_timer_hierarchy and
|
||||
project_id == actual_project_id
|
||||
):
|
||||
return
|
||||
|
||||
tag_ids = []
|
||||
task_tag_id = self.clockapi.get_tag_id(input_data["task_type"])
|
||||
if task_tag_id is not None:
|
||||
tag_ids.append(task_tag_id)
|
||||
|
||||
self.clockapi.start_time_entry(
|
||||
description, project_id, tag_ids=tag_ids
|
||||
)
|
||||
|
||||
def on_message_widget_close(self):
|
||||
self.message_widget = None
|
||||
|
||||
# Definition of Tray menu
|
||||
def tray_menu(self, parent):
|
||||
def tray_menu(self, parent_menu):
|
||||
# Menu for Tray App
|
||||
self.menu = QtWidgets.QMenu('Clockify', parent)
|
||||
self.menu = QtWidgets.QMenu('Clockify', parent_menu)
|
||||
self.menu.setProperty('submenu', 'on')
|
||||
self.menu.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
|
|
@ -109,7 +241,7 @@ class ClockifyModule:
|
|||
|
||||
self.set_menu_visibility()
|
||||
|
||||
return self.menu
|
||||
parent_menu.addMenu(self.menu)
|
||||
|
||||
def show_settings(self):
|
||||
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import re
|
||||
import requests
|
||||
import json
|
||||
import datetime
|
||||
|
|
@ -22,7 +23,9 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
app_dir = os.path.normpath(appdirs.user_data_dir('pype-app', 'pype'))
|
||||
file_name = 'clockify.json'
|
||||
fpath = os.path.join(app_dir, file_name)
|
||||
admin_permission_names = ['WORKSPACE_OWN', 'WORKSPACE_ADMIN']
|
||||
master_parent = None
|
||||
workspace = None
|
||||
workspace_id = None
|
||||
|
||||
def set_master(self, master_parent):
|
||||
|
|
@ -41,6 +44,8 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
if api_key is not None and self.validate_api_key(api_key) is True:
|
||||
self.headers["X-Api-Key"] = api_key
|
||||
self.set_workspace()
|
||||
if self.master_parent:
|
||||
self.master_parent.signed_in()
|
||||
return True
|
||||
return False
|
||||
|
||||
|
|
@ -55,31 +60,41 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
return False
|
||||
return True
|
||||
|
||||
def validate_workspace_perm(self):
|
||||
test_project = '__test__'
|
||||
action_url = 'workspaces/{}/projects/'.format(self.workspace_id)
|
||||
body = {
|
||||
"name": test_project, "clientId": "", "isPublic": "false",
|
||||
"estimate": {"type": "AUTO"},
|
||||
"color": "#f44336", "billable": "true"
|
||||
}
|
||||
response = requests.post(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers, json=body
|
||||
def validate_workspace_perm(self, workspace_id=None):
|
||||
user_id = self.get_user_id()
|
||||
if user_id is None:
|
||||
return False
|
||||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
action_url = "/workspaces/{}/users/{}/permissions".format(
|
||||
workspace_id, user_id
|
||||
)
|
||||
if response.status_code == 201:
|
||||
self.delete_project(self.get_project_id(test_project))
|
||||
return True
|
||||
else:
|
||||
projects = self.get_projects()
|
||||
if test_project in projects:
|
||||
try:
|
||||
self.delete_project(self.get_project_id(test_project))
|
||||
return True
|
||||
except json.decoder.JSONDecodeError:
|
||||
return False
|
||||
response = requests.get(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers
|
||||
)
|
||||
user_permissions = response.json()
|
||||
for perm in user_permissions:
|
||||
if perm['name'] in self.admin_permission_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_user_id(self):
|
||||
action_url = 'v1/user/'
|
||||
response = requests.get(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers
|
||||
)
|
||||
# this regex is neccessary: UNICODE strings are crashing
|
||||
# during json serialization
|
||||
id_regex ='\"{1}id\"{1}\:{1}\"{1}\w+\"{1}'
|
||||
result = re.findall(id_regex, str(response.content))
|
||||
if len(result) != 1:
|
||||
# replace with log and better message?
|
||||
print('User ID was not found (this is a BUG!!!)')
|
||||
return None
|
||||
return json.loads('{'+result[0]+'}')['id']
|
||||
|
||||
def set_workspace(self, name=None):
|
||||
if name is None:
|
||||
name = os.environ.get('CLOCKIFY_WORKSPACE', None)
|
||||
|
|
@ -147,6 +162,19 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
project["name"]: project["id"] for project in response.json()
|
||||
}
|
||||
|
||||
def get_project_by_id(self, project_id, workspace_id=None):
|
||||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
action_url = 'workspaces/{}/projects/{}/'.format(
|
||||
workspace_id, project_id
|
||||
)
|
||||
response = requests.get(
|
||||
self.endpoint + action_url,
|
||||
headers=self.headers
|
||||
)
|
||||
|
||||
return response.json()
|
||||
|
||||
def get_tags(self, workspace_id=None):
|
||||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
|
|
@ -279,6 +307,9 @@ class ClockifyAPI(metaclass=Singleton):
|
|||
if workspace_id is None:
|
||||
workspace_id = self.workspace_id
|
||||
current = self.get_in_progress(workspace_id)
|
||||
if current is None:
|
||||
return
|
||||
|
||||
current_id = current["id"]
|
||||
action_url = 'workspaces/{}/timeEntries/{}'.format(
|
||||
workspace_id, current_id
|
||||
|
|
|
|||
|
|
@ -1,108 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.clockify import ClockifyAPI
|
||||
|
||||
|
||||
class StartClockify(BaseAction):
|
||||
'''Starts timer on clockify.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'clockify.start.timer'
|
||||
#: Action label.
|
||||
label = 'Start timer'
|
||||
#: Action description.
|
||||
description = 'Starts timer on clockify'
|
||||
#: roles that are allowed to register this action
|
||||
icon = '{}/app_icons/clockify.png'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
#: Clockify api
|
||||
clockapi = ClockifyAPI()
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
if entities[0].entity_type.lower() != 'task':
|
||||
return False
|
||||
if self.clockapi.workspace_id is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
task = entities[0]
|
||||
task_name = task['type']['name']
|
||||
project_name = task['project']['full_name']
|
||||
|
||||
def get_parents(entity):
|
||||
output = []
|
||||
if entity.entity_type.lower() == 'project':
|
||||
return output
|
||||
output.extend(get_parents(entity['parent']))
|
||||
output.append(entity['name'])
|
||||
|
||||
return output
|
||||
|
||||
desc_items = get_parents(task['parent'])
|
||||
desc_items.append(task['name'])
|
||||
description = '/'.join(desc_items)
|
||||
project_id = self.clockapi.get_project_id(project_name)
|
||||
tag_ids = []
|
||||
tag_ids.append(self.clockapi.get_tag_id(task_name))
|
||||
self.clockapi.start_time_entry(
|
||||
description, project_id, tag_ids=tag_ids
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
StartClockify(session).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
@ -17,10 +17,8 @@ class SyncClocify(BaseAction):
|
|||
label = 'Sync To Clockify'
|
||||
#: Action description.
|
||||
description = 'Synchronise data to Clockify workspace'
|
||||
#: priority
|
||||
priority = 100
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
role_list = ["Pypeclub", "Administrator", "project Manager"]
|
||||
#: icon
|
||||
icon = '{}/app_icons/clockify-white.png'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
|
|
@ -28,16 +26,22 @@ class SyncClocify(BaseAction):
|
|||
#: CLockifyApi
|
||||
clockapi = ClockifyAPI()
|
||||
|
||||
def register(self):
|
||||
def preregister(self):
|
||||
if self.clockapi.workspace_id is None:
|
||||
raise ValueError('Clockify Workspace or API key are not set!')
|
||||
return "Clockify Workspace or API key are not set!"
|
||||
|
||||
if self.clockapi.validate_workspace_perm() is False:
|
||||
raise MissingPermision('Clockify')
|
||||
super().register()
|
||||
|
||||
return True
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
|
|
|
|||
91
pype/clockify/widget_message.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
from Qt import QtCore, QtGui, QtWidgets
|
||||
from pypeapp import style
|
||||
|
||||
|
||||
class MessageWidget(QtWidgets.QWidget):
|
||||
|
||||
SIZE_W = 300
|
||||
SIZE_H = 130
|
||||
|
||||
closed = QtCore.Signal()
|
||||
|
||||
def __init__(self, parent=None, messages=[], title="Message"):
|
||||
|
||||
super(MessageWidget, self).__init__()
|
||||
|
||||
self._parent = parent
|
||||
|
||||
# Icon
|
||||
if parent and hasattr(parent, 'icon'):
|
||||
self.setWindowIcon(parent.icon)
|
||||
else:
|
||||
from pypeapp.resources import get_resource
|
||||
self.setWindowIcon(QtGui.QIcon(get_resource('icon.png')))
|
||||
|
||||
self.setWindowFlags(
|
||||
QtCore.Qt.WindowCloseButtonHint |
|
||||
QtCore.Qt.WindowMinimizeButtonHint
|
||||
)
|
||||
|
||||
# Font
|
||||
self.font = QtGui.QFont()
|
||||
self.font.setFamily("DejaVu Sans Condensed")
|
||||
self.font.setPointSize(9)
|
||||
self.font.setBold(True)
|
||||
self.font.setWeight(50)
|
||||
self.font.setKerning(True)
|
||||
|
||||
# Size setting
|
||||
self.resize(self.SIZE_W, self.SIZE_H)
|
||||
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
|
||||
self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
|
||||
|
||||
# Style
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
|
||||
self.setLayout(self._ui_layout(messages))
|
||||
self.setWindowTitle(title)
|
||||
|
||||
def _ui_layout(self, messages):
|
||||
if not messages:
|
||||
messages = ["*Misssing messages (This is a bug)*", ]
|
||||
|
||||
elif not isinstance(messages, (tuple, list)):
|
||||
messages = [messages, ]
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
|
||||
labels = []
|
||||
for message in messages:
|
||||
label = QtWidgets.QLabel(message)
|
||||
label.setFont(self.font)
|
||||
label.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
|
||||
label.setTextFormat(QtCore.Qt.RichText)
|
||||
label.setWordWrap(True)
|
||||
|
||||
labels.append(label)
|
||||
main_layout.addWidget(label)
|
||||
|
||||
btn_close = QtWidgets.QPushButton("Close")
|
||||
btn_close.setToolTip('Close this window')
|
||||
btn_close.clicked.connect(self.on_close_clicked)
|
||||
|
||||
btn_group = QtWidgets.QHBoxLayout()
|
||||
btn_group.addStretch(1)
|
||||
btn_group.addWidget(btn_close)
|
||||
|
||||
main_layout.addLayout(btn_group)
|
||||
|
||||
self.labels = labels
|
||||
self.btn_group = btn_group
|
||||
self.btn_close = btn_close
|
||||
self.main_layout = main_layout
|
||||
|
||||
return main_layout
|
||||
|
||||
def on_close_clicked(self):
|
||||
self.close()
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
self.closed.emit()
|
||||
super(MessageWidget, self).close(*args, **kwargs)
|
||||
|
|
@ -9,7 +9,7 @@ from pype.lib import get_all_avalon_projects
|
|||
log = Logger().get_logger(__name__)
|
||||
|
||||
|
||||
def registerApp(app, session):
|
||||
def registerApp(app, session, plugins_presets):
|
||||
name = app['name']
|
||||
variant = ""
|
||||
try:
|
||||
|
|
@ -41,14 +41,14 @@ def registerApp(app, session):
|
|||
# register action
|
||||
AppAction(
|
||||
session, label, name, executable, variant,
|
||||
icon, description, preactions
|
||||
icon, description, preactions, plugins_presets
|
||||
).register()
|
||||
|
||||
if not variant:
|
||||
log.info('- Variant is not set')
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets={}):
|
||||
# WARNING getting projects only helps to check connection to mongo
|
||||
# - without will `discover` of ftrack apps actions take ages
|
||||
result = get_all_avalon_projects()
|
||||
|
|
@ -71,7 +71,7 @@ def register(session):
|
|||
app_counter = 0
|
||||
for app in apps:
|
||||
try:
|
||||
registerApp(app, session)
|
||||
registerApp(app, session, plugins_presets)
|
||||
if app_counter%5 == 0:
|
||||
time.sleep(0.1)
|
||||
app_counter += 1
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ class AssetDelete(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -87,7 +87,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AssetDelete(session).register()
|
||||
AssetDelete(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import os
|
|||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class AttributesRemapper(BaseAction):
|
||||
|
|
@ -11,13 +11,14 @@ class AttributesRemapper(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'attributes.remapper'
|
||||
#: Action label.
|
||||
label = 'Attributes Remapper'
|
||||
label = "Pype Doctor"
|
||||
variant = '- Attributes Remapper'
|
||||
#: Action description.
|
||||
description = 'Remaps attributes in avalon DB'
|
||||
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator"]
|
||||
icon = '{}/ftrack/action_icons/AttributesRemapper.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -274,12 +275,12 @@ class AttributesRemapper(BaseAction):
|
|||
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
|
||||
items.append(message)
|
||||
|
||||
self.show_interface(event, items, title)
|
||||
self.show_interface(items=items, title=title, event=event)
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AttributesRemapper(session).register()
|
||||
AttributesRemapper(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -53,12 +53,12 @@ class ClientReviewSort(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
action_handler = ClientReviewSort(session)
|
||||
action_handler = ClientReviewSort(session, plugins_presets)
|
||||
action_handler.register()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ class ComponentOpen(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -74,7 +74,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ComponentOpen(session).register()
|
||||
ComponentOpen(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -110,12 +110,13 @@ class CustomAttributes(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'create.update.attributes'
|
||||
#: Action label.
|
||||
label = 'Create/Update Avalon Attributes'
|
||||
label = "Pype Admin"
|
||||
variant = '- Create/Update Avalon Attributes'
|
||||
#: Action description.
|
||||
description = 'Creates Avalon/Mongo ID for double check'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/CustomAttributes.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -568,7 +569,7 @@ class CustomAttributes(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -577,7 +578,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CustomAttributes(session).register()
|
||||
CustomAttributes(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import re
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon import lib as avalonlib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from pypeapp import config, Anatomy
|
||||
|
||||
|
||||
|
|
@ -30,11 +30,13 @@ class CreateFolders(BaseAction):
|
|||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
not_allowed = ['assetversion']
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
not_allowed = ['assetversion', 'project']
|
||||
if entities[0].entity_type.lower() in not_allowed:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
|
|
@ -322,13 +324,13 @@ class PartialDict(dict):
|
|||
return '{'+key+'}'
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CreateFolders(session).register()
|
||||
CreateFolders(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -13,9 +13,9 @@ class CreateProjectFolders(BaseAction):
|
|||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'create.project.folders'
|
||||
identifier = 'create.project.structure'
|
||||
#: Action label.
|
||||
label = 'Create Project Folders'
|
||||
label = 'Create Project Structure'
|
||||
#: Action description.
|
||||
description = 'Creates folder structure'
|
||||
#: roles that are allowed to register this action
|
||||
|
|
@ -31,6 +31,11 @@ class CreateProjectFolders(BaseAction):
|
|||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -190,13 +195,13 @@ class CreateProjectFolders(BaseAction):
|
|||
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CreateProjectFolders(session).register()
|
||||
CreateProjectFolders(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
@ -12,14 +12,15 @@ class CustomAttributeDoctor(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'custom.attributes.doctor'
|
||||
#: Action label.
|
||||
label = 'Custom Attributes Doctor'
|
||||
label = "Pype Doctor"
|
||||
variant = '- Custom Attributes Doctor'
|
||||
#: Action description.
|
||||
description = (
|
||||
'Fix hierarchical custom attributes mainly handles, fstart'
|
||||
' and fend'
|
||||
)
|
||||
|
||||
icon = '{}/ftrack/action_icons/TestAction.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend']
|
||||
|
|
@ -286,13 +287,13 @@ class CustomAttributeDoctor(BaseAction):
|
|||
return all_roles
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
CustomAttributeDoctor(session).register()
|
||||
CustomAttributeDoctor(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from bson.objectid import ObjectId
|
|||
import argparse
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class DeleteAsset(BaseAction):
|
||||
|
|
@ -311,7 +311,7 @@ class DeleteAsset(BaseAction):
|
|||
return assets
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -320,7 +320,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DeleteAsset(session).register()
|
||||
DeleteAsset(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import logging
|
|||
import argparse
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class AssetsRemover(BaseAction):
|
||||
|
|
@ -13,12 +13,13 @@ class AssetsRemover(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'remove.assets'
|
||||
#: Action label.
|
||||
label = 'Delete Assets by Name'
|
||||
label = "Pype Admin"
|
||||
variant = '- Delete Assets by Name'
|
||||
#: Action description.
|
||||
description = 'Removes assets from Ftrack and Avalon db with all childs'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/AssetsRemover.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
#: Db
|
||||
|
|
@ -131,7 +132,7 @@ class AssetsRemover(BaseAction):
|
|||
return assets
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -140,7 +141,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
AssetsRemover(session).register()
|
||||
AssetsRemover(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ class VersionsCleanup(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -51,7 +51,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
VersionsCleanup(session).register()
|
||||
VersionsCleanup(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -21,9 +21,9 @@ class DJVViewAction(BaseAction):
|
|||
)
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
self.djv_path = None
|
||||
|
||||
self.config_data = config.get_presets()['djv_view']['config']
|
||||
|
|
@ -218,12 +218,12 @@ class DJVViewAction(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets={}):
|
||||
"""Register hooks."""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DJVViewAction(session).register()
|
||||
DJVViewAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -14,12 +14,13 @@ class JobKiller(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'job.killer'
|
||||
#: Action label.
|
||||
label = 'Job Killer'
|
||||
label = "Pype Admin"
|
||||
variant = '- Job Killer'
|
||||
#: Action description.
|
||||
description = 'Killing selected running jobs'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/JobKiller.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -117,7 +118,7 @@ class JobKiller(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -126,7 +127,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
JobKiller(session).register()
|
||||
JobKiller(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -112,13 +112,13 @@ class MultipleNotes(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
MultipleNotes(session).register()
|
||||
MultipleNotes(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
378
pype/ftrack/actions/action_prepare_project.py
Normal file
|
|
@ -0,0 +1,378 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
from ruamel import yaml
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pypeapp import config
|
||||
from pype.ftrack.lib import get_avalon_attr
|
||||
|
||||
from pype.vendor.ftrack_api import session as fa_session
|
||||
|
||||
|
||||
class PrepareProject(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'prepare.project'
|
||||
#: Action label.
|
||||
label = 'Prepare Project'
|
||||
#: Action description.
|
||||
description = 'Set basic attributes on the project'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project manager"]
|
||||
icon = '{}/ftrack/action_icons/PrepareProject.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
# Key to store info about trigerring create folder structure
|
||||
create_project_structure_key = "create_folder_structure"
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
# Inform user that this may take a while
|
||||
self.show_message(event, "Preparing data... Please wait", True)
|
||||
|
||||
self.log.debug("Loading custom attributes")
|
||||
cust_attrs, hier_cust_attrs = get_avalon_attr(session, True)
|
||||
project_defaults = config.get_presets(
|
||||
entities[0]["full_name"]
|
||||
).get("ftrack", {}).get("project_defaults", {})
|
||||
|
||||
self.log.debug("Preparing data which will be shown")
|
||||
attributes_to_set = {}
|
||||
for attr in hier_cust_attrs:
|
||||
key = attr["key"]
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
}
|
||||
|
||||
for attr in cust_attrs:
|
||||
if attr["entity_type"].lower() != "show":
|
||||
continue
|
||||
key = attr["key"]
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
}
|
||||
|
||||
# Sort by label
|
||||
attributes_to_set = dict(sorted(
|
||||
attributes_to_set.items(),
|
||||
key=lambda x: x[1]["label"]
|
||||
))
|
||||
self.log.debug("Preparing interface for keys: \"{}\"".format(
|
||||
str([key for key in attributes_to_set])
|
||||
))
|
||||
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
title = "Prepare Project"
|
||||
items = []
|
||||
|
||||
# Ask if want to trigger Action Create Folder Structure
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Want to create basic Folder Structure?</h3>"
|
||||
})
|
||||
|
||||
items.append({
|
||||
"name": self.create_project_structure_key,
|
||||
"type": "boolean",
|
||||
"value": False,
|
||||
"label": "Check if Yes"
|
||||
})
|
||||
|
||||
items.append(item_splitter)
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Set basic Attributes:</h3>"
|
||||
})
|
||||
|
||||
multiselect_enumerators = []
|
||||
|
||||
# This item will be last (before enumerators)
|
||||
# - sets value of auto synchronization
|
||||
auto_sync_name = "avalon_auto_sync"
|
||||
auto_sync_item = {
|
||||
"name": auto_sync_name,
|
||||
"type": "boolean",
|
||||
"value": project_defaults.get(auto_sync_name, False),
|
||||
"label": "AutoSync to Avalon"
|
||||
}
|
||||
|
||||
for key, in_data in attributes_to_set.items():
|
||||
attr = in_data["object"]
|
||||
|
||||
# initial item definition
|
||||
item = {
|
||||
"name": key,
|
||||
"label": in_data["label"]
|
||||
}
|
||||
|
||||
# cust attr type - may have different visualization
|
||||
type_name = attr["type"]["name"].lower()
|
||||
easy_types = ["text", "boolean", "date", "number"]
|
||||
|
||||
easy_type = False
|
||||
if type_name in easy_types:
|
||||
easy_type = True
|
||||
|
||||
elif type_name == "enumerator":
|
||||
|
||||
attr_config = json.loads(attr["config"])
|
||||
attr_config_data = json.loads(attr_config["data"])
|
||||
|
||||
if attr_config["multiSelect"] is True:
|
||||
multiselect_enumerators.append(item_splitter)
|
||||
|
||||
multiselect_enumerators.append({
|
||||
"type": "label",
|
||||
"value": in_data["label"]
|
||||
})
|
||||
|
||||
default = in_data["default"]
|
||||
names = []
|
||||
for option in sorted(
|
||||
attr_config_data, key=lambda x: x["menu"]
|
||||
):
|
||||
name = option["value"]
|
||||
new_name = "__{}__{}".format(key, name)
|
||||
names.append(new_name)
|
||||
item = {
|
||||
"name": new_name,
|
||||
"type": "boolean",
|
||||
"label": "- {}".format(option["menu"])
|
||||
}
|
||||
if default:
|
||||
if (
|
||||
isinstance(default, list) or
|
||||
isinstance(default, tuple)
|
||||
):
|
||||
if name in default:
|
||||
item["value"] = True
|
||||
else:
|
||||
if name == default:
|
||||
item["value"] = True
|
||||
|
||||
multiselect_enumerators.append(item)
|
||||
|
||||
multiselect_enumerators.append({
|
||||
"type": "hidden",
|
||||
"name": "__hidden__{}".format(key),
|
||||
"value": json.dumps(names)
|
||||
})
|
||||
else:
|
||||
easy_type = True
|
||||
item["data"] = attr_config_data
|
||||
|
||||
else:
|
||||
self.log.warning((
|
||||
"Custom attribute \"{}\" has type \"{}\"."
|
||||
" I don't know how to handle"
|
||||
).format(key, type_name))
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"!!! Can't handle Custom attritubte type \"{}\""
|
||||
" (key: \"{}\")"
|
||||
).format(type_name, key)
|
||||
})
|
||||
|
||||
if easy_type:
|
||||
item["type"] = type_name
|
||||
|
||||
# default value in interface
|
||||
default = in_data["default"]
|
||||
if default is not None:
|
||||
item["value"] = default
|
||||
|
||||
items.append(item)
|
||||
|
||||
# Add autosync attribute
|
||||
items.append(auto_sync_item)
|
||||
|
||||
# Add enumerator items at the end
|
||||
for item in multiselect_enumerators:
|
||||
items.append(item)
|
||||
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if not event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
in_data = event['data']['values']
|
||||
|
||||
# pop out info about creating project structure
|
||||
create_proj_struct = in_data.pop(self.create_project_structure_key)
|
||||
|
||||
# Find hidden items for multiselect enumerators
|
||||
keys_to_process = []
|
||||
for key in in_data:
|
||||
if key.startswith("__hidden__"):
|
||||
keys_to_process.append(key)
|
||||
|
||||
self.log.debug("Preparing data for Multiselect Enumerators")
|
||||
enumerators = {}
|
||||
for key in keys_to_process:
|
||||
new_key = key.replace("__hidden__", "")
|
||||
enumerator_items = in_data.pop(key)
|
||||
enumerators[new_key] = json.loads(enumerator_items)
|
||||
|
||||
# find values set for multiselect enumerator
|
||||
for key, enumerator_items in enumerators.items():
|
||||
in_data[key] = []
|
||||
|
||||
name = "__{}__".format(key)
|
||||
|
||||
for item in enumerator_items:
|
||||
value = in_data.pop(item)
|
||||
if value is True:
|
||||
new_key = item.replace(name, "")
|
||||
in_data[key].append(new_key)
|
||||
|
||||
self.log.debug("Setting Custom Attribute values:")
|
||||
entity = entities[0]
|
||||
for key, value in in_data.items():
|
||||
entity["custom_attributes"][key] = value
|
||||
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
|
||||
|
||||
session.commit()
|
||||
|
||||
# Create project structure
|
||||
self.create_project_specific_config(entities[0]["full_name"], in_data)
|
||||
|
||||
# Trigger Create Project Structure action
|
||||
if create_proj_struct is True:
|
||||
self.trigger_action("create.project.structure", event)
|
||||
|
||||
return True
|
||||
|
||||
def create_project_specific_config(self, project_name, json_data):
|
||||
self.log.debug("*** Creating project specifig configs ***")
|
||||
|
||||
path_proj_configs = os.environ.get('PYPE_PROJECT_CONFIGS', "")
|
||||
|
||||
# Skip if PYPE_PROJECT_CONFIGS is not set
|
||||
# TODO show user OS message
|
||||
if not path_proj_configs:
|
||||
self.log.warning((
|
||||
"Environment variable \"PYPE_PROJECT_CONFIGS\" is not set."
|
||||
" Project specific config can't be set."
|
||||
))
|
||||
return
|
||||
|
||||
path_proj_configs = os.path.normpath(path_proj_configs)
|
||||
# Skip if path does not exist
|
||||
# TODO create if not exist?!!!
|
||||
if not os.path.exists(path_proj_configs):
|
||||
self.log.warning((
|
||||
"Path set in Environment variable \"PYPE_PROJECT_CONFIGS\""
|
||||
" Does not exist."
|
||||
))
|
||||
return
|
||||
|
||||
project_specific_path = os.path.normpath(
|
||||
os.path.join(path_proj_configs, project_name)
|
||||
)
|
||||
if not os.path.exists(project_specific_path):
|
||||
os.makedirs(project_specific_path)
|
||||
self.log.debug((
|
||||
"Project specific config folder for project \"{}\" created."
|
||||
).format(project_name))
|
||||
|
||||
# Anatomy ####################################
|
||||
self.log.debug("--- Processing Anatomy Begins: ---")
|
||||
|
||||
anatomy_dir = os.path.normpath(os.path.join(
|
||||
project_specific_path, "anatomy"
|
||||
))
|
||||
anatomy_path = os.path.normpath(os.path.join(
|
||||
anatomy_dir, "default.yaml"
|
||||
))
|
||||
|
||||
anatomy = None
|
||||
if os.path.exists(anatomy_path):
|
||||
self.log.debug(
|
||||
"Anatomy file already exist. Trying to read: \"{}\"".format(
|
||||
anatomy_path
|
||||
)
|
||||
)
|
||||
# Try to load data
|
||||
with open(anatomy_path, 'r') as file_stream:
|
||||
try:
|
||||
anatomy = yaml.load(file_stream, Loader=yaml.loader.Loader)
|
||||
self.log.debug("Reading Anatomy file was successful")
|
||||
except yaml.YAMLError as exc:
|
||||
self.log.warning(
|
||||
"Reading Yaml file failed: \"{}\"".format(anatomy_path),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not anatomy:
|
||||
self.log.debug("Anatomy is not set. Duplicating default.")
|
||||
# Create Anatomy folder
|
||||
if not os.path.exists(anatomy_dir):
|
||||
self.log.debug(
|
||||
"Creating Anatomy folder: \"{}\"".format(anatomy_dir)
|
||||
)
|
||||
os.makedirs(anatomy_dir)
|
||||
|
||||
source_items = [
|
||||
os.environ["PYPE_CONFIG"], "anatomy", "default.yaml"
|
||||
]
|
||||
|
||||
source_path = os.path.normpath(os.path.join(*source_items))
|
||||
with open(source_path, 'r') as file_stream:
|
||||
source_data = file_stream.read()
|
||||
|
||||
with open(anatomy_path, 'w') as file_stream:
|
||||
file_stream.write(source_data)
|
||||
|
||||
# Presets ####################################
|
||||
self.log.debug("--- Processing Presets Begins: ---")
|
||||
|
||||
project_defaults_dir = os.path.normpath(os.path.join(*[
|
||||
project_specific_path, "presets", "ftrack"
|
||||
]))
|
||||
project_defaults_path = os.path.normpath(os.path.join(*[
|
||||
project_defaults_dir, "project_defaults.json"
|
||||
]))
|
||||
# Create folder if not exist
|
||||
if not os.path.exists(project_defaults_dir):
|
||||
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
|
||||
project_defaults_dir
|
||||
))
|
||||
os.makedirs(project_defaults_dir)
|
||||
|
||||
with open(project_defaults_path, 'w') as file_stream:
|
||||
json.dump(json_data, file_stream, indent=4)
|
||||
|
||||
self.log.debug("*** Creating project specifig configs Finished ***")
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
PrepareProject(session, plugins_presets).register()
|
||||
|
|
@ -23,13 +23,13 @@ class RVAction(BaseAction):
|
|||
)
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
""" Constructor
|
||||
|
||||
:param session: ftrack Session
|
||||
:type session: :class:`ftrack_api.Session`
|
||||
"""
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
self.rv_path = None
|
||||
self.config_data = None
|
||||
|
||||
|
|
@ -326,12 +326,12 @@ class RVAction(BaseAction):
|
|||
return paths
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets={}):
|
||||
"""Register hooks."""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
RVAction(session).register()
|
||||
RVAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -71,7 +71,7 @@ class SetVersion(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -80,7 +80,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SetVersion(session).register()
|
||||
SetVersion(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -19,61 +19,25 @@ class StartTimer(BaseAction):
|
|||
entity = entities[0]
|
||||
if entity.entity_type.lower() != 'task':
|
||||
return
|
||||
self.start_ftrack_timer(entity)
|
||||
try:
|
||||
self.start_clockify_timer(entity)
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
'Failed starting Clockify timer for task: ' + entity['name']
|
||||
)
|
||||
|
||||
user = self.session.query(
|
||||
"User where username is \"{}\"".format(self.session.api_user)
|
||||
).one()
|
||||
|
||||
user.start_timer(entity, force=True)
|
||||
self.session.commit()
|
||||
|
||||
self.log.info(
|
||||
"Starting Ftrack timer for task: {}".format(entity['name'])
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
def start_ftrack_timer(self, task):
|
||||
user_query = 'User where username is "{}"'.format(self.session.api_user)
|
||||
user = self.session.query(user_query).one()
|
||||
self.log.info('Starting Ftrack timer for task: ' + task['name'])
|
||||
user.start_timer(task, force=True)
|
||||
self.session.commit()
|
||||
|
||||
def start_clockify_timer(self, task):
|
||||
# Validate Clockify settings if Clockify is required
|
||||
clockify_timer = os.environ.get('CLOCKIFY_WORKSPACE', None)
|
||||
if clockify_timer is None:
|
||||
return
|
||||
|
||||
from pype.clockify import ClockifyAPI
|
||||
clockapi = ClockifyAPI()
|
||||
if clockapi.verify_api() is False:
|
||||
return
|
||||
task_type = task['type']['name']
|
||||
project_name = task['project']['full_name']
|
||||
|
||||
def get_parents(entity):
|
||||
output = []
|
||||
if entity.entity_type.lower() == 'project':
|
||||
return output
|
||||
output.extend(get_parents(entity['parent']))
|
||||
output.append(entity['name'])
|
||||
|
||||
return output
|
||||
|
||||
desc_items = get_parents(task['parent'])
|
||||
desc_items.append(task['name'])
|
||||
description = '/'.join(desc_items)
|
||||
|
||||
project_id = clockapi.get_project_id(project_name)
|
||||
tag_ids = []
|
||||
tag_ids.append(clockapi.get_tag_id(task_type))
|
||||
clockapi.start_time_entry(
|
||||
description, project_id, tag_ids=tag_ids
|
||||
)
|
||||
self.log.info('Starting Clockify timer for task: ' + task['name'])
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
StartTimer(session).register()
|
||||
StartTimer(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import collections
|
|||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
|
||||
|
|
@ -19,11 +19,12 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.hierarchical.attrs.local'
|
||||
#: Action label.
|
||||
label = 'Sync HierAttrs - Local'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync Hier Attrs (Local)'
|
||||
#: Action description.
|
||||
description = 'Synchronize hierarchical attributes'
|
||||
#: Icon
|
||||
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -305,13 +306,13 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -47,11 +47,12 @@ class SyncToAvalon(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.to.avalon.local'
|
||||
#: Action label.
|
||||
label = 'SyncToAvalon - Local'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync To Avalon (Local)'
|
||||
#: Action description.
|
||||
description = 'Send data from Ftrack to Avalon'
|
||||
#: Action icon.
|
||||
icon = '{}/ftrack/action_icons/SyncToAvalon-local.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
#: roles that are allowed to register this action
|
||||
|
|
@ -59,7 +60,7 @@ class SyncToAvalon(BaseAction):
|
|||
#: Action priority
|
||||
priority = 200
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets):
|
||||
super(SyncToAvalon, self).__init__(session)
|
||||
# reload utils on initialize (in case of server restart)
|
||||
|
||||
|
|
@ -177,17 +178,7 @@ class SyncToAvalon(BaseAction):
|
|||
job['status'] = 'failed'
|
||||
session.commit()
|
||||
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier='sync.hierarchical.attrs.local',
|
||||
selection=event['data']['selection']
|
||||
),
|
||||
source=dict(
|
||||
user=event['source']['user']
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
self.trigger_action("sync.hierarchical.attrs.local", event)
|
||||
|
||||
if len(message) > 0:
|
||||
message = "Unable to sync: {}".format(message)
|
||||
|
|
@ -212,7 +203,7 @@ class SyncToAvalon(BaseAction):
|
|||
self.add_childs_to_importable(child)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -221,7 +212,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncToAvalon(session).register()
|
||||
SyncToAvalon(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -40,13 +40,13 @@ class TestAction(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
TestAction(session).register()
|
||||
TestAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -14,9 +14,11 @@ class ThumbToChildren(BaseAction):
|
|||
# Action identifier
|
||||
identifier = 'thumb.to.children'
|
||||
# Action label
|
||||
label = 'Thumbnail to Children'
|
||||
label = 'Thumbnail'
|
||||
# Action variant
|
||||
variant = " to Children"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/thumbToChildren.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -64,12 +66,12 @@ class ThumbToChildren(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbToChildren(session).register()
|
||||
ThumbToChildren(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
@ -13,9 +13,11 @@ class ThumbToParent(BaseAction):
|
|||
# Action identifier
|
||||
identifier = 'thumb.to.parent'
|
||||
# Action label
|
||||
label = 'Thumbnail to Parent'
|
||||
label = 'Thumbnail'
|
||||
# Action variant
|
||||
variant = " to Parent"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/thumbToParent.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
|
|
@ -86,12 +88,12 @@ class ThumbToParent(BaseAction):
|
|||
}
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register action. Called when used as an event plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbToParent(session).register()
|
||||
ThumbToParent(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
@ -29,26 +29,18 @@ class ActionAskWhereIRun(BaseAction):
|
|||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier="show.where.i.run",
|
||||
selection=event["data"]["selection"],
|
||||
event_hub_id=session.event_hub.id
|
||||
),
|
||||
source=dict(
|
||||
user=dict(username=session.api_user)
|
||||
)
|
||||
more_data = {"event_hub_id": session.event_hub.id}
|
||||
self.trigger_action(
|
||||
"show.where.i.run", event, additional_event_data=more_data
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ActionAskWhereIRun(session).register()
|
||||
ActionAskWhereIRun(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -77,10 +77,10 @@ class ActionShowWhereIRun(BaseAction):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ActionShowWhereIRun(session).register()
|
||||
ActionShowWhereIRun(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import collections
|
|||
from pypeapp import config
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseAction, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
|
||||
|
|
@ -20,11 +20,12 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.hierarchical.attrs'
|
||||
#: Action label.
|
||||
label = 'Sync HierAttrs'
|
||||
label = "Pype Admin"
|
||||
variant = '- Sync Hier Attrs (server)'
|
||||
#: Action description.
|
||||
description = 'Synchronize hierarchical attributes'
|
||||
#: Icon
|
||||
icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get(
|
||||
'PYPE_STATICS_SERVER',
|
||||
'http://localhost:{}'.format(
|
||||
|
|
@ -221,7 +222,11 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
session.commit()
|
||||
|
||||
if self.interface_messages:
|
||||
self.show_interface_from_dict(self.interface_messages, event)
|
||||
self.show_interface_from_dict(
|
||||
messages=self.interface_messages,
|
||||
title="something went wrong",
|
||||
event=event
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
@ -333,13 +338,13 @@ class SyncHierarchicalAttrs(BaseAction):
|
|||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ from pype.ftrack import BaseAction, lib
|
|||
from pype.vendor.ftrack_api import session as fa_session
|
||||
|
||||
|
||||
class Sync_To_Avalon(BaseAction):
|
||||
class SyncToAvalon(BaseAction):
|
||||
'''
|
||||
Synchronizing data action - from Ftrack to Avalon DB
|
||||
|
||||
|
|
@ -48,11 +48,12 @@ class Sync_To_Avalon(BaseAction):
|
|||
#: Action identifier.
|
||||
identifier = 'sync.to.avalon'
|
||||
#: Action label.
|
||||
label = 'SyncToAvalon'
|
||||
label = "Pype Admin"
|
||||
variant = "- Sync To Avalon (Server)"
|
||||
#: Action description.
|
||||
description = 'Send data from Ftrack to Avalon'
|
||||
#: Action icon.
|
||||
icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format(
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get(
|
||||
'PYPE_STATICS_SERVER',
|
||||
'http://localhost:{}'.format(
|
||||
|
|
@ -206,18 +207,8 @@ class Sync_To_Avalon(BaseAction):
|
|||
job['status'] = 'failed'
|
||||
|
||||
session.commit()
|
||||
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier='sync.hierarchical.attrs',
|
||||
selection=event['data']['selection']
|
||||
),
|
||||
source=dict(
|
||||
user=event['source']['user']
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
|
||||
self.trigger_action("sync.hierarchical.attrs", event)
|
||||
|
||||
if len(message) > 0:
|
||||
message = "Unable to sync: {}".format(message)
|
||||
|
|
@ -242,7 +233,7 @@ class Sync_To_Avalon(BaseAction):
|
|||
self.add_childs_to_importable(child)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
# Validate that session is an instance of ftrack_api.Session. If not,
|
||||
|
|
@ -251,7 +242,7 @@ def register(session, **kw):
|
|||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Sync_To_Avalon(session).register()
|
||||
SyncToAvalon(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
|
|
|
|||
|
|
@ -51,9 +51,9 @@ class DelAvalonIdFromNew(BaseEvent):
|
|||
continue
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
DelAvalonIdFromNew(session).register()
|
||||
DelAvalonIdFromNew(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -86,9 +86,9 @@ class NextTaskUpdate(BaseEvent):
|
|||
session.rollback()
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
NextTaskUpdate(session).register()
|
||||
NextTaskUpdate(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -34,9 +34,9 @@ class Radio_buttons(BaseEvent):
|
|||
session.commit()
|
||||
|
||||
|
||||
def register(session):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Radio_buttons(session).register()
|
||||
Radio_buttons(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
|
|
@ -23,7 +23,10 @@ class SyncHierarchicalAttrs(BaseEvent):
|
|||
if not keys:
|
||||
continue
|
||||
|
||||
entity = session.get(ent['entity_type'], ent['entityId'])
|
||||
if not ent['entityType'] in ['task', 'show']:
|
||||
continue
|
||||
|
||||
entity = session.get(self._get_entity_type(ent), ent['entityId'])
|
||||
processable.append(ent)
|
||||
processable_ent[ent['entityId']] = entity
|
||||
|
||||
|
|
@ -115,9 +118,9 @@ class SyncHierarchicalAttrs(BaseEvent):
|
|||
self.update_hierarchical_attribute(child, key, value)
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
SyncHierarchicalAttrs(session).register()
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -118,10 +118,10 @@ class Sync_to_Avalon(BaseEvent):
|
|||
return
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Sync_to_Avalon(session).register()
|
||||
Sync_to_Avalon(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -20,9 +20,9 @@ class Test_Event(BaseEvent):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
Test_Event(session).register()
|
||||
Test_Event(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -45,9 +45,9 @@ class ThumbnailEvents(BaseEvent):
|
|||
pass
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
ThumbnailEvents(session).register()
|
||||
ThumbnailEvents(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from pype.vendor import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
from pypeapp import config
|
||||
from pypeapp import Anatomy
|
||||
|
|
@ -229,11 +229,11 @@ class UserAssigmentEvent(BaseEvent):
|
|||
return True
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
"""
|
||||
Register plugin. Called when used as an plugin.
|
||||
"""
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
UserAssigmentEvent(session).register()
|
||||
UserAssigmentEvent(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -69,9 +69,9 @@ class VersionToTaskStatus(BaseEvent):
|
|||
path, task_status['name']))
|
||||
|
||||
|
||||
def register(session, **kw):
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
if not isinstance(session, ftrack_api.session.Session):
|
||||
return
|
||||
|
||||
VersionToTaskStatus(session).register()
|
||||
VersionToTaskStatus(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -5,7 +5,9 @@ import importlib
|
|||
from pype.vendor import ftrack_api
|
||||
import time
|
||||
import logging
|
||||
from pypeapp import Logger
|
||||
import inspect
|
||||
from pypeapp import Logger, config
|
||||
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
||||
|
|
@ -27,8 +29,8 @@ PYTHONPATH # Path to ftrack_api and paths to all modules used in actions
|
|||
"""
|
||||
|
||||
|
||||
class FtrackServer():
|
||||
def __init__(self, type='action'):
|
||||
class FtrackServer:
|
||||
def __init__(self, server_type='action'):
|
||||
"""
|
||||
- 'type' is by default set to 'action' - Runs Action server
|
||||
- enter 'event' for Event server
|
||||
|
|
@ -43,21 +45,12 @@ class FtrackServer():
|
|||
ftrack_log = logging.getLogger("ftrack_api")
|
||||
ftrack_log.setLevel(logging.WARNING)
|
||||
|
||||
self.type = type
|
||||
self.actionsAvailable = True
|
||||
self.eventsAvailable = True
|
||||
# Separate all paths
|
||||
if "FTRACK_ACTIONS_PATH" in os.environ:
|
||||
all_action_paths = os.environ["FTRACK_ACTIONS_PATH"]
|
||||
self.actionsPaths = all_action_paths.split(os.pathsep)
|
||||
else:
|
||||
self.actionsAvailable = False
|
||||
env_key = "FTRACK_ACTIONS_PATH"
|
||||
if server_type.lower() == 'event':
|
||||
env_key = "FTRACK_EVENTS_PATH"
|
||||
|
||||
if "FTRACK_EVENTS_PATH" in os.environ:
|
||||
all_event_paths = os.environ["FTRACK_EVENTS_PATH"]
|
||||
self.eventsPaths = all_event_paths.split(os.pathsep)
|
||||
else:
|
||||
self.eventsAvailable = False
|
||||
self.server_type = server_type
|
||||
self.env_key = env_key
|
||||
|
||||
def stop_session(self):
|
||||
if self.session.event_hub.connected is True:
|
||||
|
|
@@ -67,7 +60,7 @@ class FtrackServer():

    def set_files(self, paths):
        # Iterate all paths
-        functions = []
+        register_functions_dict = []
        for path in paths:
            # add path to PYTHON PATH
            if path not in sys.path:

@@ -92,13 +85,11 @@ class FtrackServer():

                # separate files by register function
                if 'register' not in mod_functions:
-                    msg = (
-                        '"{0}" - Missing register method'
-                    ).format(file, self.type)
+                    msg = ('"{}" - Missing register method').format(file)
                    log.warning(msg)
                    continue

-                functions.append({
+                register_functions_dict.append({
                    'name': file,
                    'register': mod_functions['register']
                })

@@ -106,45 +97,49 @@ class FtrackServer():
                msg = 'Loading of file "{}" failed ({})'.format(
                    file, str(e)
                )
-                log.warning(msg)
+                log.warning(msg, exc_info=e)

-        if len(functions) < 1:
+        if len(register_functions_dict) < 1:
            raise Exception

+        # Load presets for setting plugins
+        key = "user"
+        if self.server_type.lower() == "event":
+            key = "server"
+        plugins_presets = config.get_presets().get(
+            "ftrack", {}
+        ).get("plugins", {}).get(key, {})
+
        function_counter = 0
-        for function in functions:
+        for function_dict in register_functions_dict:
+            register = function_dict["register"]
            try:
-                function['register'](self.session)
+                if len(inspect.signature(register).parameters) == 1:
+                    register(self.session)
+                else:
+                    register(self.session, plugins_presets=plugins_presets)
+
                if function_counter%7 == 0:
                    time.sleep(0.1)
                function_counter += 1
-            except Exception as e:
+            except Exception as exc:
                msg = '"{}" - register was not successful ({})'.format(
-                    function['name'], str(e)
+                    function_dict['name'], str(exc)
                )
                log.warning(msg)

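The loop above decides at call time whether a plugin's register function accepts presets by inspecting its signature, so plugins written against the old one-argument API keep working. A minimal runnable sketch of that dispatch, with made-up register functions and a hypothetical plugins_presets dict:

    import inspect


    def call_register(register, session, plugins_presets):
        # Old-style plugins take only the session; new-style ones also
        # receive the presets dict as a keyword argument.
        if len(inspect.signature(register).parameters) == 1:
            register(session)
        else:
            register(session, plugins_presets=plugins_presets)


    def register_old(session):
        print("old-style register:", session)


    def register_new(session, plugins_presets={}):
        print("new-style register:", session, plugins_presets)


    call_register(register_old, "fake-session", {})
    call_register(register_new, "fake-session",
                  {"TestAction": {"ignore_me": True}})
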
def run_server(self):
|
||||
self.session = ftrack_api.Session(auto_connect_event_hub=True,)
|
||||
|
||||
if self.type.lower() == 'event':
|
||||
if self.eventsAvailable is False:
|
||||
msg = (
|
||||
'FTRACK_EVENTS_PATH is not set'
|
||||
', event server won\'t launch'
|
||||
)
|
||||
log.error(msg)
|
||||
return
|
||||
self.set_files(self.eventsPaths)
|
||||
else:
|
||||
if self.actionsAvailable is False:
|
||||
msg = (
|
||||
'FTRACK_ACTIONS_PATH is not set'
|
||||
', action server won\'t launch'
|
||||
)
|
||||
log.error(msg)
|
||||
return
|
||||
self.set_files(self.actionsPaths)
|
||||
paths_str = os.environ.get(self.env_key)
|
||||
if paths_str is None:
|
||||
log.error((
|
||||
"Env var \"{}\" is not set, \"{}\" server won\'t launch"
|
||||
).format(self.env_key, self.server_type))
|
||||
return
|
||||
|
||||
paths = paths_str.split(os.pathsep)
|
||||
self.set_files(paths)
|
||||
|
||||
log.info(60*"*")
|
||||
log.info('Registration of actions/events has finished!')
|
||||
|
|
|
|||
|
|
@@ -326,13 +326,26 @@ def import_to_avalon(
    return output


-def get_avalon_attr(session):
+def get_avalon_attr(session, split_hierarchical=False):
    custom_attributes = []
+    hier_custom_attributes = []
    query = 'CustomAttributeGroup where name is "avalon"'
    all_avalon_attr = session.query(query).one()
    for cust_attr in all_avalon_attr['custom_attribute_configurations']:
-        if 'avalon_' not in cust_attr['key']:
-            custom_attributes.append(cust_attr)
+        if 'avalon_' in cust_attr['key']:
+            continue
+
+        if split_hierarchical:
+            if cust_attr["is_hierarchical"]:
+                hier_custom_attributes.append(cust_attr)
+                continue
+
+        custom_attributes.append(cust_attr)
+
+    if split_hierarchical:
+        # return tuple
+        return custom_attributes, hier_custom_attributes
+
    return custom_attributes

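With the new split_hierarchical flag the helper returns either a single list or a tuple, so callers have to unpack accordingly. A short usage sketch (the session object is assumed to be an existing ftrack_api session):

    # Old behaviour: one flat list of avalon custom attributes.
    custom_attrs = get_avalon_attr(session)

    # New behaviour: hierarchical attributes come back as a second list.
    custom_attrs, hier_attrs = get_avalon_attr(session, split_hierarchical=True)
    for attr in hier_attrs:
        print("hierarchical attribute:", attr["key"])
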
@ -21,9 +21,9 @@ class BaseAction(BaseHandler):
|
|||
icon = None
|
||||
type = 'Action'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
|
||||
if self.label is None:
|
||||
raise ValueError(
|
||||
|
|
|
|||
|
|
@ -26,10 +26,10 @@ class AppAction(BaseHandler):
|
|||
preactions = ['start.timer']
|
||||
|
||||
def __init__(
|
||||
self, session, label, name, executable,
|
||||
variant=None, icon=None, description=None, preactions=[]
|
||||
self, session, label, name, executable, variant=None,
|
||||
icon=None, description=None, preactions=[], plugins_presets={}
|
||||
):
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
|
||||
if label is None:
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class BaseHandler(object):
|
|||
ignore_me = False
|
||||
preactions = []
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
self._session = session
|
||||
self.log = Logger().get_logger(self.__class__.__name__)
|
||||
|
|
@@ -37,13 +37,23 @@ class BaseHandler(object):
        # Using decorator
        self.register = self.register_decorator(self.register)
        self.launch = self.launch_log(self.launch)
+        self.plugins_presets = plugins_presets

    # Decorator
    def register_decorator(self, func):
        @functools.wraps(func)
        def wrapper_register(*args, **kwargs):
+            presets_data = self.plugins_presets.get(self.__class__.__name__)
+            if presets_data:
+                for key, value in presets_data.items():
+                    if not hasattr(self, key):
+                        continue
+                    setattr(self, key, value)

            if self.ignore_me:
                return

            label = self.__class__.__name__
            if hasattr(self, 'label'):
                if self.variant is None:

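The decorator above lets a presets dict, keyed by handler class name, override class attributes such as ignore_me or label before registration. A self-contained sketch of that override step with a made-up handler class and preset dicts:

    class DemoAction(object):
        ignore_me = False
        label = "Demo"

        def __init__(self, plugins_presets={}):
            self.plugins_presets = plugins_presets

        def register(self):
            # Apply presets for this class before deciding to register.
            presets_data = self.plugins_presets.get(self.__class__.__name__)
            if presets_data:
                for key, value in presets_data.items():
                    if not hasattr(self, key):
                        continue
                    setattr(self, key, value)

            if self.ignore_me:
                print("DemoAction disabled by presets, skipping registration")
                return
            print("DemoAction registered with label:", self.label)


    DemoAction({"DemoAction": {"label": "Demo (renamed)"}}).register()
    DemoAction({"DemoAction": {"ignore_me": True}}).register()
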
@ -84,13 +94,12 @@ class BaseHandler(object):
|
|||
def launch_log(self, func):
|
||||
@functools.wraps(func)
|
||||
def wrapper_launch(*args, **kwargs):
|
||||
label = self.__class__.__name__
|
||||
if hasattr(self, 'label'):
|
||||
label = self.label
|
||||
if hasattr(self, 'variant'):
|
||||
label = '{} {}'.format(self.label, self.variant)
|
||||
else:
|
||||
label = self.label
|
||||
else:
|
||||
label = self.__class__.__name__
|
||||
if self.variant is not None:
|
||||
label = '{} {}'.format(self.label, self.variant)
|
||||
|
||||
self.log.info(('{} "{}": Launched').format(self.type, label))
|
||||
try:
|
||||
|
|
@ -131,6 +140,13 @@ class BaseHandler(object):
|
|||
|
||||
# Custom validations
|
||||
result = self.preregister()
|
||||
if result is None:
|
||||
self.log.debug((
|
||||
"\"{}\" 'preregister' method returned 'None'. Expected it"
|
||||
" didn't fail and continue as preregister returned True."
|
||||
).format(self.__class__.__name__))
|
||||
return
|
||||
|
||||
if result is True:
|
||||
return
|
||||
msg = "Pre-register conditions were not met"
|
||||
|
|
@ -311,30 +327,13 @@ class BaseHandler(object):
|
|||
|
||||
# Launch preactions
|
||||
for preaction in self.preactions:
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier=preaction,
|
||||
selection=selection
|
||||
),
|
||||
source=dict(
|
||||
user=dict(username=session.api_user)
|
||||
)
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
self.trigger_action(preaction, event)
|
||||
|
||||
# Relaunch this action
|
||||
event = fa_session.ftrack_api.event.base.Event(
|
||||
topic='ftrack.action.launch',
|
||||
data=dict(
|
||||
actionIdentifier=self.identifier,
|
||||
selection=selection,
|
||||
preactions_launched=True
|
||||
),
|
||||
source=dict(
|
||||
user=dict(username=session.api_user)
|
||||
)
|
||||
additional_data = {"preactions_launched": True}
|
||||
self.trigger_action(
|
||||
self.identifier, event, additional_event_data=additional_data
|
||||
)
|
||||
session.event_hub.publish(event, on_error='ignore')
|
||||
|
||||
return False
|
||||
|
||||
|
|
@ -495,7 +494,8 @@ class BaseHandler(object):
|
|||
)
|
||||
|
||||
def show_interface_from_dict(
|
||||
self, messages, title="", event=None, user=None, username=None, user_id=None
|
||||
self, messages, title="", event=None,
|
||||
user=None, username=None, user_id=None
|
||||
):
|
||||
if not messages:
|
||||
self.log.debug("No messages to show! (messages dict is empty)")
|
||||
|
|
@@ -522,3 +522,60 @@ class BaseHandler(object):
            items.append(message)

        self.show_interface(items, title, event, user, username, user_id)
+
+    def trigger_action(
+        self, action_name, event=None, session=None,
+        selection=None, user_data=None,
+        topic="ftrack.action.launch", additional_event_data={},
+        on_error="ignore"
+    ):
+        self.log.debug("Triggering action \"{}\" Begins".format(action_name))
+
+        if not session:
+            session = self.session
+
+        # Getting selection and user data
+        _selection = None
+        _user_data = None
+
+        if event:
+            _selection = event.get("data", {}).get("selection")
+            _user_data = event.get("source", {}).get("user")
+
+        if selection is not None:
+            _selection = selection
+
+        if user_data is not None:
+            _user_data = user_data
+
+        # Without selection and user data skip triggering
+        msg = "Can't trigger \"{}\" action without {}."
+        if _selection is None:
+            self.log.error(msg.format(action_name, "selection"))
+            return
+
+        if _user_data is None:
+            self.log.error(msg.format(action_name, "user data"))
+            return
+
+        _event_data = {
+            "actionIdentifier": action_name,
+            "selection": _selection
+        }
+
+        # Add additional data
+        if additional_event_data:
+            _event_data.update(additional_event_data)
+
+        # Create and trigger event
+        session.event_hub.publish(
+            fa_session.ftrack_api.event.base.Event(
+                topic=topic,
+                data=_event_data,
+                source=dict(user=_user_data)
+            ),
+            on_error=on_error
+        )
+        self.log.debug(
+            "Action \"{}\" Triggered successfully".format(action_name)
+        )

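With trigger_action in place, handlers no longer assemble ftrack.action.launch events by hand; they re-use the selection and user from the incoming event and can pass extra payload through additional_event_data. A sketch of how a hypothetical action in this codebase would call it (assumes a configured ftrack session, so it is not runnable standalone):

    class ExampleAction(BaseAction):
        identifier = 'example.action'
        label = 'Example'

        def launch(self, session, entities, event):
            # Chain another action, forwarding selection and user from `event`.
            self.trigger_action(
                "sync.hierarchical.attrs.local",
                event,
                additional_event_data={"event_hub_id": session.event_hub.id}
            )
            return True
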
@ -15,9 +15,9 @@ class BaseEvent(BaseHandler):
|
|||
|
||||
type = 'Event'
|
||||
|
||||
def __init__(self, session):
|
||||
def __init__(self, session, plugins_presets={}):
|
||||
'''Expects a ftrack_api.Session instance'''
|
||||
super().__init__(session)
|
||||
super().__init__(session, plugins_presets)
|
||||
|
||||
# Decorator
|
||||
def launch_log(self, func):
|
||||
|
|
|
|||
433
pype/ftrack/lib/io_nonsingleton.py
Normal file
|
|
@ -0,0 +1,433 @@
|
|||
"""
|
||||
Wrapper around interactions with the database
|
||||
|
||||
Copy of io module in avalon-core.
|
||||
- In this case not working as singleton with api.Session!
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import errno
|
||||
import shutil
|
||||
import logging
|
||||
import tempfile
|
||||
import functools
|
||||
import contextlib
|
||||
|
||||
from avalon import schema
|
||||
from avalon.vendor import requests
|
||||
|
||||
# Third-party dependencies
|
||||
import pymongo
|
||||
|
||||
|
||||
def auto_reconnect(func):
    """Handling auto reconnect in 3 retry times"""
    @functools.wraps(func)
    def decorated(*args, **kwargs):
        object = args[0]
        for retry in range(3):
            try:
                return func(*args, **kwargs)
            except pymongo.errors.AutoReconnect:
                object.log.error("Reconnecting..")
                time.sleep(0.1)
        else:
            raise

    return decorated

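The auto_reconnect decorator retries a MongoDB call a few times when pymongo reports AutoReconnect, then gives up. The same pattern, sketched with a dummy client and ConnectionError so it runs without a database:

    import functools
    import logging
    import time


    def retry(func):
        """Same shape as auto_reconnect above, retrying on ConnectionError."""
        @functools.wraps(func)
        def decorated(self, *args, **kwargs):
            for _ in range(3):
                try:
                    return func(self, *args, **kwargs)
                except ConnectionError:
                    self.log.error("Reconnecting..")
                    time.sleep(0.1)
            raise RuntimeError("gave up after 3 retries")
        return decorated


    class FlakyClient(object):
        """Dummy stand-in for DbConnector: fails once, then succeeds."""
        log = logging.getLogger("FlakyClient")
        calls = 0

        @retry
        def ping(self):
            self.calls += 1
            if self.calls < 2:
                raise ConnectionError("lost connection")
            return "ok after {} calls".format(self.calls)


    print(FlakyClient().ping())  # -> "ok after 2 calls"
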
class DbConnector(object):
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def __init__(self):
|
||||
self.Session = {}
|
||||
self._mongo_client = None
|
||||
self._sentry_client = None
|
||||
self._sentry_logging_handler = None
|
||||
self._database = None
|
||||
self._is_installed = False
|
||||
|
||||
def install(self):
|
||||
"""Establish a persistent connection to the database"""
|
||||
if self._is_installed:
|
||||
return
|
||||
|
||||
logging.basicConfig()
|
||||
self.Session.update(self._from_environment())
|
||||
|
||||
timeout = int(self.Session["AVALON_TIMEOUT"])
|
||||
self._mongo_client = pymongo.MongoClient(
|
||||
self.Session["AVALON_MONGO"], serverSelectionTimeoutMS=timeout)
|
||||
|
||||
for retry in range(3):
|
||||
try:
|
||||
t1 = time.time()
|
||||
self._mongo_client.server_info()
|
||||
|
||||
except Exception:
|
||||
self.log.error("Retrying..")
|
||||
time.sleep(1)
|
||||
timeout *= 1.5
|
||||
|
||||
else:
|
||||
break
|
||||
|
||||
else:
|
||||
raise IOError(
|
||||
"ERROR: Couldn't connect to %s in "
|
||||
"less than %.3f ms" % (self.Session["AVALON_MONGO"], timeout))
|
||||
|
||||
self.log.info("Connected to %s, delay %.3f s" % (
|
||||
self.Session["AVALON_MONGO"], time.time() - t1))
|
||||
|
||||
self._install_sentry()
|
||||
|
||||
self._database = self._mongo_client[self.Session["AVALON_DB"]]
|
||||
self._is_installed = True
|
||||
|
||||
def _install_sentry(self):
|
||||
if "AVALON_SENTRY" not in self.Session:
|
||||
return
|
||||
|
||||
try:
|
||||
from raven import Client
|
||||
from raven.handlers.logging import SentryHandler
|
||||
from raven.conf import setup_logging
|
||||
except ImportError:
|
||||
# Note: There was a Sentry address in this Session
|
||||
return self.log.warning("Sentry disabled, raven not installed")
|
||||
|
||||
client = Client(self.Session["AVALON_SENTRY"])
|
||||
|
||||
# Transmit log messages to Sentry
|
||||
handler = SentryHandler(client)
|
||||
handler.setLevel(logging.WARNING)
|
||||
|
||||
setup_logging(handler)
|
||||
|
||||
self._sentry_client = client
|
||||
self._sentry_logging_handler = handler
|
||||
self.log.info(
|
||||
"Connected to Sentry @ %s" % self.Session["AVALON_SENTRY"]
|
||||
)
|
||||
|
||||
def _from_environment(self):
|
||||
Session = {
|
||||
item[0]: os.getenv(item[0], item[1])
|
||||
for item in (
|
||||
# Root directory of projects on disk
|
||||
("AVALON_PROJECTS", None),
|
||||
|
||||
# Name of current Project
|
||||
("AVALON_PROJECT", ""),
|
||||
|
||||
# Name of current Asset
|
||||
("AVALON_ASSET", ""),
|
||||
|
||||
# Name of current silo
|
||||
("AVALON_SILO", ""),
|
||||
|
||||
# Name of current task
|
||||
("AVALON_TASK", None),
|
||||
|
||||
# Name of current app
|
||||
("AVALON_APP", None),
|
||||
|
||||
# Path to working directory
|
||||
("AVALON_WORKDIR", None),
|
||||
|
||||
# Name of current Config
|
||||
# TODO(marcus): Establish a suitable default config
|
||||
("AVALON_CONFIG", "no_config"),
|
||||
|
||||
# Name of Avalon in graphical user interfaces
|
||||
# Use this to customise the visual appearance of Avalon
|
||||
# to better integrate with your surrounding pipeline
|
||||
("AVALON_LABEL", "Avalon"),
|
||||
|
||||
# Used during any connections to the outside world
|
||||
("AVALON_TIMEOUT", "1000"),
|
||||
|
||||
# Address to Asset Database
|
||||
("AVALON_MONGO", "mongodb://localhost:27017"),
|
||||
|
||||
# Name of database used in MongoDB
|
||||
("AVALON_DB", "avalon"),
|
||||
|
||||
# Address to Sentry
|
||||
("AVALON_SENTRY", None),
|
||||
|
||||
# Address to Deadline Web Service
|
||||
# E.g. http://192.167.0.1:8082
|
||||
("AVALON_DEADLINE", None),
|
||||
|
||||
# Enable features not necessarily stable. The user's own risk
|
||||
("AVALON_EARLY_ADOPTER", None),
|
||||
|
||||
# Address of central asset repository, contains
|
||||
# the following interface:
|
||||
# /upload
|
||||
# /download
|
||||
# /manager (optional)
|
||||
("AVALON_LOCATION", "http://127.0.0.1"),
|
||||
|
||||
# Boolean of whether to upload published material
|
||||
# to central asset repository
|
||||
("AVALON_UPLOAD", None),
|
||||
|
||||
# Generic username and password
|
||||
("AVALON_USERNAME", "avalon"),
|
||||
("AVALON_PASSWORD", "secret"),
|
||||
|
||||
# Unique identifier for instances in working files
|
||||
("AVALON_INSTANCE_ID", "avalon.instance"),
|
||||
("AVALON_CONTAINER_ID", "avalon.container"),
|
||||
|
||||
# Enable debugging
|
||||
("AVALON_DEBUG", None),
|
||||
|
||||
) if os.getenv(item[0], item[1]) is not None
|
||||
}
|
||||
|
||||
Session["schema"] = "avalon-core:session-1.0"
|
||||
try:
|
||||
schema.validate(Session)
|
||||
except schema.ValidationError as e:
|
||||
# TODO(marcus): Make this mandatory
|
||||
self.log.warning(e)
|
||||
|
||||
return Session
|
||||
|
||||
def uninstall(self):
|
||||
"""Close any connection to the database"""
|
||||
try:
|
||||
self._mongo_client.close()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
self._mongo_client = None
|
||||
self._database = None
|
||||
self._is_installed = False
|
||||
|
||||
def active_project(self):
|
||||
"""Return the name of the active project"""
|
||||
return self.Session["AVALON_PROJECT"]
|
||||
|
||||
def activate_project(self, project_name):
|
||||
self.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
def projects(self):
|
||||
"""List available projects
|
||||
|
||||
Returns:
|
||||
list of project documents
|
||||
|
||||
"""
|
||||
|
||||
collection_names = self.collections()
|
||||
for project in collection_names:
|
||||
if project in ("system.indexes",):
|
||||
continue
|
||||
|
||||
# Each collection will have exactly one project document
|
||||
document = self.find_project(project)
|
||||
|
||||
if document is not None:
|
||||
yield document
|
||||
|
||||
def locate(self, path):
|
||||
"""Traverse a hierarchy from top-to-bottom
|
||||
|
||||
Example:
|
||||
representation = locate(["hulk", "Bruce", "modelDefault", 1, "ma"])
|
||||
|
||||
Returns:
|
||||
representation (ObjectId)
|
||||
|
||||
"""
|
||||
|
||||
components = zip(
|
||||
("project", "asset", "subset", "version", "representation"),
|
||||
path
|
||||
)
|
||||
|
||||
parent = None
|
||||
for type_, name in components:
|
||||
latest = (type_ == "version") and name in (None, -1)
|
||||
|
||||
try:
|
||||
if latest:
|
||||
parent = self.find_one(
|
||||
filter={
|
||||
"type": type_,
|
||||
"parent": parent
|
||||
},
|
||||
projection={"_id": 1},
|
||||
sort=[("name", -1)]
|
||||
)["_id"]
|
||||
else:
|
||||
parent = self.find_one(
|
||||
filter={
|
||||
"type": type_,
|
||||
"name": name,
|
||||
"parent": parent
|
||||
},
|
||||
projection={"_id": 1},
|
||||
)["_id"]
|
||||
|
||||
except TypeError:
|
||||
return None
|
||||
|
||||
return parent
|
||||
|
||||
@auto_reconnect
|
||||
def collections(self):
|
||||
return self._database.collection_names()
|
||||
|
||||
@auto_reconnect
|
||||
def find_project(self, project):
|
||||
return self._database[project].find_one({"type": "project"})
|
||||
|
||||
@auto_reconnect
|
||||
def insert_one(self, item):
|
||||
assert isinstance(item, dict), "item must be of type <dict>"
|
||||
schema.validate(item)
|
||||
return self._database[self.Session["AVALON_PROJECT"]].insert_one(item)
|
||||
|
||||
@auto_reconnect
|
||||
def insert_many(self, items, ordered=True):
|
||||
# check if all items are valid
|
||||
assert isinstance(items, list), "`items` must be of type <list>"
|
||||
for item in items:
|
||||
assert isinstance(item, dict), "`item` must be of type <dict>"
|
||||
schema.validate(item)
|
||||
|
||||
return self._database[self.Session["AVALON_PROJECT"]].insert_many(
|
||||
items,
|
||||
ordered=ordered)
|
||||
|
||||
@auto_reconnect
|
||||
def find(self, filter, projection=None, sort=None):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].find(
|
||||
filter=filter,
|
||||
projection=projection,
|
||||
sort=sort
|
||||
)
|
||||
|
||||
@auto_reconnect
|
||||
def find_one(self, filter, projection=None, sort=None):
|
||||
assert isinstance(filter, dict), "filter must be <dict>"
|
||||
|
||||
return self._database[self.Session["AVALON_PROJECT"]].find_one(
|
||||
filter=filter,
|
||||
projection=projection,
|
||||
sort=sort
|
||||
)
|
||||
|
||||
@auto_reconnect
|
||||
def save(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].save(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def replace_one(self, filter, replacement):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].replace_one(
|
||||
filter, replacement)
|
||||
|
||||
@auto_reconnect
|
||||
def update_many(self, filter, update):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].update_many(
|
||||
filter, update)
|
||||
|
||||
@auto_reconnect
|
||||
def distinct(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].distinct(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def drop(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].drop(
|
||||
*args, **kwargs)
|
||||
|
||||
@auto_reconnect
|
||||
def delete_many(self, *args, **kwargs):
|
||||
return self._database[self.Session["AVALON_PROJECT"]].delete_many(
|
||||
*args, **kwargs)
|
||||
|
||||
def parenthood(self, document):
|
||||
assert document is not None, "This is a bug"
|
||||
|
||||
parents = list()
|
||||
|
||||
while document.get("parent") is not None:
|
||||
document = self.find_one({"_id": document["parent"]})
|
||||
|
||||
if document is None:
|
||||
break
|
||||
|
||||
parents.append(document)
|
||||
|
||||
return parents
|
||||
|
||||
@contextlib.contextmanager
|
||||
def tempdir(self):
|
||||
tempdir = tempfile.mkdtemp()
|
||||
try:
|
||||
yield tempdir
|
||||
finally:
|
||||
shutil.rmtree(tempdir)
|
||||
|
||||
def download(self, src, dst):
|
||||
"""Download `src` to `dst`
|
||||
|
||||
Arguments:
|
||||
src (str): URL to source file
|
||||
dst (str): Absolute path to destination file
|
||||
|
||||
Yields tuple (progress, error):
|
||||
progress (int): Between 0-100
|
||||
error (Exception): Any exception raised when first making connection
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
response = requests.get(
|
||||
src,
|
||||
stream=True,
|
||||
auth=requests.auth.HTTPBasicAuth(
|
||||
self.Session["AVALON_USERNAME"],
|
||||
self.Session["AVALON_PASSWORD"]
|
||||
)
|
||||
)
|
||||
except requests.ConnectionError as e:
|
||||
yield None, e
|
||||
return
|
||||
|
||||
with self.tempdir() as dirname:
|
||||
tmp = os.path.join(dirname, os.path.basename(src))
|
||||
|
||||
with open(tmp, "wb") as f:
|
||||
total_length = response.headers.get("content-length")
|
||||
|
||||
if total_length is None: # no content length header
|
||||
f.write(response.content)
|
||||
else:
|
||||
downloaded = 0
|
||||
total_length = int(total_length)
|
||||
for data in response.iter_content(chunk_size=4096):
|
||||
downloaded += len(data)
|
||||
f.write(data)
|
||||
|
||||
yield int(100.0 * downloaded / total_length), None
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.dirname(dst))
|
||||
except OSError as e:
|
||||
# An already existing destination directory is fine.
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
shutil.copy(tmp, dst)
|
||||
|
|
@ -88,9 +88,11 @@ class FtrackModule:
|
|||
def set_action_server(self):
|
||||
try:
|
||||
self.action_server.run_server()
|
||||
except Exception:
|
||||
msg = 'Ftrack Action server crashed! Please try to start again.'
|
||||
log.error(msg)
|
||||
except Exception as exc:
|
||||
log.error(
|
||||
"Ftrack Action server crashed! Please try to start again.",
|
||||
exc_info=True
|
||||
)
|
||||
# TODO show message to user
|
||||
self.bool_action_server = False
|
||||
self.set_menu_visibility()
|
||||
|
|
|
|||
84
pype/lib.py
|
|
@ -31,7 +31,9 @@ def _subprocess(args):
|
|||
output = proc.communicate()[0]
|
||||
|
||||
if proc.returncode != 0:
|
||||
log.error(output)
|
||||
raise ValueError("\"{}\" was not successful: {}".format(args, output))
|
||||
return output
|
||||
|
||||
|
||||
def get_hierarchy(asset_name=None):
|
||||
|
|
@@ -421,7 +423,7 @@ def get_version_from_path(file):
        v: version number in string ('001')

    """
-    pattern = re.compile(r"[\._]v([0-9]*)")
+    pattern = re.compile(r"[\._]v([0-9]+)")
    try:
        return pattern.findall(file)[0]
    except IndexError:

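The switch from [0-9]* to [0-9]+ matters because * also matches an empty digit run, so a stray "_v" without digits used to come back as an empty-string version. A quick illustration:

    import re

    star = re.compile(r"[\._]v([0-9]*)")
    plus = re.compile(r"[\._]v([0-9]+)")

    path = "shot010_v.nk"                    # "_v" with no digits
    print(star.findall(path))                # [''] -> bogus empty version
    print(plus.findall(path))                # []   -> correctly no version
    print(plus.findall("shot010_v001.nk"))   # ['001']
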
|
@ -467,10 +469,18 @@ def filter_pyblish_plugins(plugins):
|
|||
|
||||
host = api.current_host()
|
||||
|
||||
presets = config.get_presets().get('plugins', {}).get(host, {}).get(
|
||||
"publish", {}
|
||||
)
|
||||
|
||||
# iterate over plugins
|
||||
for plugin in plugins[:]:
|
||||
# skip if there are no presets to process
|
||||
if not presets:
|
||||
continue
|
||||
|
||||
try:
|
||||
config_data = config.get_presets()['plugins'][host]["publish"][plugin.__name__] # noqa: E501
|
||||
config_data = presets[plugin.__name__] # noqa: E501
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
|
|
@@ -483,3 +493,73 @@ def filter_pyblish_plugins(plugins):
                    option, value, plugin.__name__))

            setattr(plugin, option, value)
+
+
+def get_subsets(asset_name,
+                regex_filter=None,
+                version=None,
+                representations=["exr", "dpx"]):
+    """
+    Query subsets with filter on name.
+
+    The method will return all found subsets with their version and
+    representations. The version can be specified by number; otherwise the
+    latest one is used. Representations can be filtered.
+
+    Arguments:
+        asset_name (str): asset (shot) name
+        regex_filter (raw): raw string with filter pattern
+        version (str or int): `last` or number of version
+        representations (list): list of representation names to keep
+
+    Returns:
+        dict: subsets with version and representations in keys
+    """
+    from avalon import io
+
+    # query asset from db
+    asset_io = io.find_one({"type": "asset",
+                            "name": asset_name})
+
+    # check if anything returned
+    assert asset_io, "Asset not existing. \
+        Check correct name: `{}`".format(asset_name)
+
+    # create subsets query filter
+    filter_query = {"type": "subset", "parent": asset_io["_id"]}
+
+    # add regex filter string into query filter
+    if regex_filter:
+        filter_query.update({"name": {"$regex": r"{}".format(regex_filter)}})
+    else:
+        filter_query.update({"name": {"$regex": r'.*'}})
+
+    # query all subsets
+    subsets = [s for s in io.find(filter_query)]
+
+    assert subsets, "No subsets found. Check correct filter. Try this for start `r'.*'`: asset: `{}`".format(asset_name)
+
+    output_dict = {}
+    # Process subsets
+    for subset in subsets:
+        if not version:
+            version_sel = io.find_one({"type": "version",
+                                       "parent": subset["_id"]},
+                                      sort=[("name", -1)])
+        else:
+            assert isinstance(version, int), "version needs to be `int` type"
+            version_sel = io.find_one({"type": "version",
+                                       "parent": subset["_id"],
+                                       "name": int(version)})
+
+        find_dict = {"type": "representation",
+                     "parent": version_sel["_id"]}
+
+        filter_repr = {"$or": [{"name": repr} for repr in representations]}
+
+        find_dict.update(filter_repr)
+        repres_out = [i for i in io.find(find_dict)]
+
+        if len(repres_out) > 0:
+            output_dict[subset["name"]] = {"version": version_sel,
+                                           "representaions": repres_out}
+
+    return output_dict

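A usage sketch of get_subsets with a hypothetical shot name and filter; it needs an active avalon session (AVALON_PROJECT and related variables set) so io can reach the database. Note that the returned dict keeps the misspelled "representaions" key from the code above:

    subsets = get_subsets("sh010", regex_filter=r"plate.*", representations=["exr"])

    for subset_name, data in subsets.items():
        version_doc = data["version"]
        repre_names = [repre["name"] for repre in data["representaions"]]
        print(subset_name, version_doc["name"], repre_names)
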
@ -104,7 +104,7 @@ def install():
|
|||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
|
||||
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)
|
||||
|
||||
workfile_settings = lib.WorkfileSettings()
|
||||
# Disable all families except for the ones we explicitly want to see
|
||||
family_states = [
|
||||
"write",
|
||||
|
|
@ -121,7 +121,7 @@ def install():
|
|||
nuke.addOnCreate(launch_workfiles_app, nodeClass="Root")
|
||||
|
||||
# Set context settings.
|
||||
nuke.addOnCreate(lib.set_context_settings, nodeClass="Root")
|
||||
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
|
||||
|
||||
menu.install()
|
||||
|
||||
|
|
|
|||
895
pype/nuke/lib.py
|
|
@ -9,7 +9,7 @@ log = Logger().get_logger(__name__, "nuke")
|
|||
def install():
|
||||
menubar = nuke.menu("Nuke")
|
||||
menu = menubar.findItem(Session["AVALON_LABEL"])
|
||||
|
||||
workfile_settings = lib.WorkfileSettings()
|
||||
# replace reset resolution from avalon core to pype's
|
||||
name = "Reset Resolution"
|
||||
new_name = "Set Resolution"
|
||||
|
|
@ -20,7 +20,7 @@ def install():
|
|||
log.debug("Changing Item: {}".format(rm_item))
|
||||
# rm_item[1].setEnabled(False)
|
||||
menu.removeItem(rm_item[1].name())
|
||||
menu.addCommand(new_name, lib.reset_resolution, index=(rm_item[0]))
|
||||
menu.addCommand(new_name, workfile_settings.reset_resolution, index=(rm_item[0]))
|
||||
|
||||
# replace reset frame range from avalon core to pype's
|
||||
name = "Reset Frame Range"
|
||||
|
|
@ -31,20 +31,28 @@ def install():
|
|||
log.debug("Changing Item: {}".format(rm_item))
|
||||
# rm_item[1].setEnabled(False)
|
||||
menu.removeItem(rm_item[1].name())
|
||||
menu.addCommand(new_name, lib.reset_frame_range_handles, index=(rm_item[0]))
|
||||
menu.addCommand(new_name, workfile_settings.reset_frame_range_handles, index=(rm_item[0]))
|
||||
|
||||
# add colorspace menu item
|
||||
name = "Set colorspace"
|
||||
menu.addCommand(
|
||||
name, lib.set_colorspace,
|
||||
name, workfile_settings.set_colorspace,
|
||||
index=(rm_item[0]+2)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
# add workfile builder menu item
|
||||
name = "Build First Workfile.."
|
||||
menu.addCommand(
|
||||
name, lib.BuildWorkfile().process,
|
||||
index=(rm_item[0]+7)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
# add item that applies all setting above
|
||||
name = "Apply all settings"
|
||||
menu.addCommand(
|
||||
name, lib.set_context_settings, index=(rm_item[0]+3)
|
||||
name, workfile_settings.set_context_settings, index=(rm_item[0]+3)
|
||||
)
|
||||
log.debug("Adding menu item: {}".format(name))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
from pypeapp import Logger
|
||||
import hiero
|
||||
from avalon.tools import workfiles
|
||||
from avalon import api as avalon
|
||||
from pyblish import api as pyblish
|
||||
|
||||
|
|
|
|||
|
|
@ -189,7 +189,7 @@ def add_submission():
|
|||
|
||||
class PublishAction(QtWidgets.QAction):
|
||||
"""
|
||||
Action with is showing as menu item
|
||||
Action with is showing as menu item
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
|
|
@ -287,3 +287,59 @@ def _show_no_gui():
|
|||
|
||||
messagebox.setStandardButtons(messagebox.Ok)
|
||||
messagebox.exec_()
|
||||
|
||||
|
||||
def CreateNukeWorkfile(nodes=None,
|
||||
nodes_effects=None,
|
||||
to_timeline=False,
|
||||
**kwargs):
|
||||
''' Creating nuke workfile with particular version with given nodes
|
||||
Also it is creating timeline track items as precomps.
|
||||
|
||||
Arguments:
|
||||
nodes(list of dict): each key in dict is knob order is important
|
||||
to_timeline(type): will build trackItem with metadata
|
||||
|
||||
Returns:
|
||||
bool: True if done
|
||||
|
||||
Raises:
|
||||
Exception: with traceback
|
||||
|
||||
'''
|
||||
import hiero.core
|
||||
from avalon.nuke import imprint
|
||||
from pype.nuke import (
|
||||
lib as nklib
|
||||
)
|
||||
|
||||
# check if the file exists if does then Raise "File exists!"
|
||||
if os.path.exists(filepath):
|
||||
raise FileExistsError("File already exists: `{}`".format(filepath))
|
||||
|
||||
# if no representations matching then
|
||||
# Raise "no representations to be build"
|
||||
if len(representations) == 0:
|
||||
raise AttributeError("Missing list of `representations`")
|
||||
|
||||
# check nodes input
|
||||
if len(nodes) == 0:
|
||||
log.warning("Missing list of `nodes`")
|
||||
|
||||
# create temp nk file
|
||||
nuke_script = hiero.core.nuke.ScriptWriter()
|
||||
|
||||
# create root node and save all metadata
|
||||
root_node = hiero.core.nuke.RootNode()
|
||||
|
||||
root_path = os.environ["AVALON_PROJECTS"]
|
||||
|
||||
nuke_script.addNode(root_node)
|
||||
|
||||
# here to call pype.nuke.lib.BuildWorkfile
|
||||
script_builder = nklib.BuildWorkfile(
|
||||
root_node=root_node,
|
||||
root_path=root_path,
|
||||
nodes=nuke_script.getNodes(),
|
||||
**kwargs
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,19 +1,22 @@
|
|||
"""Host API required Work Files tool"""
|
||||
import os
|
||||
|
||||
import hiero
|
||||
|
||||
from avalon import api
|
||||
|
||||
|
||||
def file_extensions():
|
||||
return [".hrox"]
|
||||
|
||||
|
||||
def has_unsaved_changes():
|
||||
return hiero.core.projects()[-1]
|
||||
# There are no methods for querying unsaved changes to a project, so
|
||||
# enforcing to always save.
|
||||
return True
|
||||
|
||||
|
||||
def save(filepath):
|
||||
project = hiero.core.projects()[-1]
|
||||
|
||||
if project:
|
||||
project.saveAs(filepath)
|
||||
else:
|
||||
|
|
@ -22,40 +25,20 @@ def save(filepath):
|
|||
|
||||
|
||||
def open(filepath):
|
||||
try:
|
||||
hiero.core.openProject(filepath)
|
||||
return True
|
||||
except Exception as e:
|
||||
try:
|
||||
from PySide.QtGui import *
|
||||
from PySide.QtCore import *
|
||||
except:
|
||||
from PySide2.QtGui import *
|
||||
from PySide2.QtWidgets import *
|
||||
from PySide2.QtCore import *
|
||||
|
||||
prompt = "Cannot open the selected file: `{}`".format(e)
|
||||
hiero.core.log.error(prompt)
|
||||
dialog = QMessageBox.critical(
|
||||
hiero.ui.mainWindow(), "Error", unicode(prompt))
|
||||
hiero.core.openProject(filepath)
|
||||
return True
|
||||
|
||||
|
||||
def current_file():
|
||||
import os
|
||||
import hiero
|
||||
|
||||
current_file = hiero.core.projects()[-1].path()
|
||||
normalised = os.path.normpath(current_file)
|
||||
|
||||
# Unsaved current file
|
||||
if normalised is '':
|
||||
return "NOT SAVED"
|
||||
if normalised == "":
|
||||
return None
|
||||
|
||||
return normalised
|
||||
|
||||
|
||||
|
||||
def work_root():
|
||||
from avalon import api
|
||||
|
||||
return os.path.normpath(api.Session["AVALON_WORKDIR"]).replace("\\", "/")
|
||||
|
|
|
|||
|
|
@@ -106,11 +106,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        tasks_to_create = []
        for child in entity['children']:
            if child.entity_type.lower() == 'task':
-                existing_tasks.append(child['name'])
+                existing_tasks.append(child['name'].lower())
                # existing_tasks.append(child['type']['name'])

        for task in tasks:
-            if task in existing_tasks:
+            if task.lower() in existing_tasks:
                print("Task {} already exists".format(task))
                continue
            tasks_to_create.append(task)

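The comparison is now case-insensitive, so a task named "Compositing" in ftrack is no longer re-created when the collected task list says "compositing". In isolation:

    existing_tasks = [name.lower() for name in ["Compositing", "Animation"]]
    tasks = ["compositing", "Layout"]

    tasks_to_create = [t for t in tasks if t.lower() not in existing_tasks]
    print(tasks_to_create)  # ['Layout']
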
|
|
|||
|
|
@ -94,7 +94,8 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
|
||||
args = [executable, scriptpath, json_data]
|
||||
self.log.debug("Executing: {}".format(args))
|
||||
pype.api.subprocess(args)
|
||||
output = pype.api.subprocess(args)
|
||||
self.log.debug("Output: {}".format(output))
|
||||
|
||||
repre_update = {
|
||||
"files": movieFileBurnin,
|
||||
|
|
|
|||
|
|
@ -2,11 +2,11 @@ import pyblish.api
|
|||
from avalon import io
|
||||
|
||||
|
||||
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
|
||||
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
|
||||
"""Create entities in Avalon based on collected data."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder - 0.1
|
||||
label = "Integrate Hierarchy To Avalon"
|
||||
order = pyblish.api.ExtractorOrder - 0.01
|
||||
label = "Extract Hierarchy To Avalon"
|
||||
families = ["clip", "shot"]
|
||||
|
||||
def process(self, context):
|
||||
|
|
@ -170,8 +170,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
subprcs_cmd = " ".join(mov_args)
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(subprcs_cmd))
|
||||
pype.api.subprocess(subprcs_cmd)
|
||||
self.log.debug("Executing: {}".format(subprcs_cmd))
|
||||
output = pype.api.subprocess(subprcs_cmd)
|
||||
self.log.debug("Output: {}".format(output))
|
||||
|
||||
# create representation data
|
||||
repre_new.update({
|
||||
|
|
|
|||
|
|
@ -63,6 +63,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"rig",
|
||||
"plate",
|
||||
"look",
|
||||
"lut",
|
||||
"audio"
|
||||
]
|
||||
exclude_families = ["clip"]
|
||||
|
|
|
|||
|
|
@ -231,28 +231,24 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"""
|
||||
# Get a submission job
|
||||
data = instance.data.copy()
|
||||
job = instance.data.get("deadlineSubmissionJob")
|
||||
render_job = data.pop("deadlineSubmissionJob")
|
||||
submission_type = "deadline"
|
||||
|
||||
if not job:
|
||||
if not render_job:
|
||||
# No deadline job. Try Muster: musterSubmissionJob
|
||||
job = data.pop("musterSubmissionJob")
|
||||
render_job = data.pop("musterSubmissionJob")
|
||||
submission_type = "muster"
|
||||
if not job:
|
||||
if not render_job:
|
||||
raise RuntimeError("Can't continue without valid Deadline "
|
||||
"or Muster submission prior to this "
|
||||
"plug-in.")
|
||||
|
||||
if submission_type == "deadline":
|
||||
render_job = data.pop("deadlineSubmissionJob")
|
||||
self.DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
|
||||
"http://localhost:8082")
|
||||
assert self.DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"
|
||||
|
||||
self._submit_deadline_post_job(instance, job)
|
||||
|
||||
if submission_type == "muster":
|
||||
render_job = data.pop("musterSubmissionJob")
|
||||
self._submit_deadline_post_job(instance, render_job)
|
||||
|
||||
asset = data.get("asset") or api.Session["AVALON_ASSET"]
|
||||
subset = data["subset"]
|
||||
|
|
|
|||
|
|
@ -7,15 +7,30 @@ import pyblish.api
|
|||
import pype.maya.lib as lib
|
||||
import appdirs
|
||||
import platform
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
# mapping between Maya rendere names and Muster template names
|
||||
muster_maya_mapping = {
|
||||
"arnold": "Maya Arnold",
|
||||
"mentalray": "Maya Mr",
|
||||
"renderman": "Maya Renderman",
|
||||
"redshift": "Maya Redshift"
|
||||
}
|
||||
# mapping between Maya renderer names and Muster template ids
|
||||
def _get_template_id(renderer):
|
||||
"""
|
||||
Return muster template ID based on renderer name.
|
||||
|
||||
:param renderer: renderer name
|
||||
:type renderer: str
|
||||
:returns: muster template id
|
||||
:rtype: int
|
||||
"""
|
||||
|
||||
templates = config.get_presets()["muster"]["templates_mapping"]
|
||||
if not templates:
|
||||
raise RuntimeError(("Muster template mapping missing in pype-config "
|
||||
"`presets/muster/templates_mapping.json`"))
|
||||
try:
|
||||
template_id = templates[renderer]
|
||||
except KeyError:
|
||||
raise RuntimeError("Unmapped renderer - missing template id")
|
||||
|
||||
return template_id
|
||||
|
||||
|
||||
def _get_script():
|
||||
|
|
@ -213,12 +228,10 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
|
|||
:rtype: int
|
||||
:raises: Exception if template ID isn't found
|
||||
"""
|
||||
try:
|
||||
self.log.info("Trying to find template for [{}]".format(renderer))
|
||||
mapped = muster_maya_mapping.get(renderer)
|
||||
return self._templates.get(mapped)
|
||||
except ValueError:
|
||||
raise Exception('Unimplemented renderer {}'.format(renderer))
|
||||
self.log.info("Trying to find template for [{}]".format(renderer))
|
||||
mapped = _get_template_id(renderer)
|
||||
self.log.info("got id [{}]".format(mapped))
|
||||
return self._templates.get(mapped)
|
||||
|
||||
def _submit(self, payload):
|
||||
"""
|
||||
|
|
@ -253,15 +266,15 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
|
|||
self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
|
||||
|
||||
if self.MUSTER_REST_URL is None:
|
||||
self.log.debug(
|
||||
self.log.error(
|
||||
"\"MUSTER_REST_URL\" is not found. Skipping "
|
||||
"\"{}\".".format(instance)
|
||||
"[{}]".format(instance)
|
||||
)
|
||||
return
|
||||
raise RuntimeError("MUSTER_REST_URL not set")
|
||||
|
||||
self._load_credentials()
|
||||
self._authenticate()
|
||||
self._get_templates()
|
||||
# self._get_templates()
|
||||
|
||||
context = instance.context
|
||||
workspace = context.data["workspaceDir"]
|
||||
|
|
@ -349,7 +362,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
|
|||
"platform": 0,
|
||||
"job": {
|
||||
"jobName": jobname,
|
||||
"templateId": self._resolve_template(
|
||||
"templateId": self._get_template_id(
|
||||
instance.data["renderer"]),
|
||||
"chunksInterleave": 2,
|
||||
"chunksPriority": "0",
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
|
|||
label = "Attributes"
|
||||
hosts = ["maya"]
|
||||
actions = [pype.api.RepairContextAction]
|
||||
optional = True
|
||||
|
||||
def process(self, context):
|
||||
# Check for preset existence.
|
||||
|
|
@@ -74,8 +75,12 @@ class ValidateAttributes(pyblish.api.ContextPlugin):

                presets_to_validate = attributes[name]
                for attribute in node.listAttr():
-                    if attribute.attrName() in presets_to_validate:
-                        expected = presets_to_validate[attribute.attrName()]
+                    names = [attribute.shortName(), attribute.longName()]
+                    attribute_name = list(
+                        set(names) & set(presets_to_validate.keys())
+                    )
+                    if attribute_name:
+                        expected = presets_to_validate[attribute_name[0]]
                        if attribute.get() != expected:
                            invalid_attributes.append(
                                {

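The preset lookup now matches either the short or the long Maya attribute name instead of only attrName(). The same intersection logic, sketched outside Maya with made-up names:

    presets_to_validate = {"visibility": True, "castsShadows": False}

    # A Maya attribute exposes both a short and a long name, e.g. "v"/"visibility".
    names = ["v", "visibility"]

    attribute_name = list(set(names) & set(presets_to_validate.keys()))
    if attribute_name:
        expected = presets_to_validate[attribute_name[0]]
        print("validate current value against:", expected)
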
|
|
|||
|
|
@ -75,11 +75,11 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
|
|||
if missing_sets:
|
||||
for set in missing_sets:
|
||||
if '_SET' not in set:
|
||||
# A set of this node is not coming along, this is wrong!
|
||||
cls.log.error("Missing sets '{}' for node "
|
||||
"'{}'".format(missing_sets, node))
|
||||
invalid.append(node)
|
||||
continue
|
||||
# A set of this node is not coming along, this is wrong!
|
||||
cls.log.error("Missing sets '{}' for node "
|
||||
"'{}'".format(missing_sets, node))
|
||||
invalid.append(node)
|
||||
continue
|
||||
|
||||
# Ensure the node is in the sets that are collected
|
||||
for shaderset, data in relationships.items():
|
||||
|
|
|
|||
|
|
@@ -12,7 +12,7 @@ def is_subdir(path, root_dir):
    root_dir = os.path.realpath(root_dir)

    # If not on same drive
-    if os.path.splitdrive(path)[0] != os.path.splitdrive(root_dir)[0]:
+    if os.path.splitdrive(path)[0].lower() != os.path.splitdrive(root_dir)[0].lower():  # noqa: E501
        return False

    # Get 'relative path' (can contain ../ which means going up)

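On Windows the same drive may be reported as "C:" or "c:", so the case-sensitive comparison wrongly concluded "different drive". Illustrated with ntpath so it runs on any platform (paths are hypothetical):

    import ntpath

    path = r"c:\projects\show\shot010"
    root_dir = r"C:\projects"

    drive_a = ntpath.splitdrive(path)[0]
    drive_b = ntpath.splitdrive(root_dir)[0]

    print(drive_a != drive_b)                  # True  -> old check: not a subdir
    print(drive_a.lower() != drive_b.lower())  # False -> new check: same drive
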
|
|
|||
|
|
@ -16,7 +16,6 @@ def subset_to_families(subset, family, families):
|
|||
new_subset = families + subset_sufx
|
||||
return "{}.{}".format(family, new_subset)
|
||||
|
||||
|
||||
class CreateWriteRender(avalon.nuke.Creator):
|
||||
# change this to template preset
|
||||
preset = "render"
|
||||
|
|
@ -70,9 +69,7 @@ class CreateWriteRender(avalon.nuke.Creator):
|
|||
write_data.update({
|
||||
"fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}"})
|
||||
|
||||
create_write_node(self.data["subset"], write_data)
|
||||
|
||||
return
|
||||
return create_write_node(self.data["subset"], write_data)
|
||||
|
||||
|
||||
class CreateWritePrerender(avalon.nuke.Creator):
|
||||
|
|
|
|||
317
pype/plugins/nuke/load/load_luts.py
Normal file
|
|
@ -0,0 +1,317 @@
|
|||
from avalon import api, style, io
|
||||
import nuke
|
||||
import json
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class LoadLuts(api.Loader):
|
||||
"""Loading colorspace soft effect exported from nukestudio"""
|
||||
|
||||
representations = ["lutJson"]
|
||||
families = ["lut"]
|
||||
|
||||
label = "Load Luts - nodes"
|
||||
order = 0
|
||||
icon = "cc"
|
||||
color = style.colors.light
|
||||
|
||||
def load(self, context, name, namespace, data):
|
||||
"""
|
||||
Loading function to get the soft effects to particular read node
|
||||
|
||||
Arguments:
|
||||
context (dict): context of version
|
||||
name (str): name of the version
|
||||
namespace (str): asset name
|
||||
data (dict): compulsory attribute > not used
|
||||
|
||||
Returns:
|
||||
nuke node: containerised nuke node object
|
||||
"""
|
||||
# import dependencies
|
||||
from avalon.nuke import containerise
|
||||
|
||||
# get main variables
|
||||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
vname = version.get("name", None)
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
workfile_first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
namespace = namespace or context['asset']['name']
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
object_name = "{}_{}".format(name, namespace)
|
||||
|
||||
# prepare data for imprinting
|
||||
# add additional metadata from the version to imprint to Avalon knob
|
||||
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
|
||||
"source", "author", "fps"]
|
||||
|
||||
data_imprint = {"frameStart": first,
|
||||
"frameEnd": last,
|
||||
"version": vname,
|
||||
"colorspaceInput": colorspace,
|
||||
"objectName": object_name}
|
||||
|
||||
for k in add_keys:
|
||||
data_imprint.update({k: version_data[k]})
|
||||
|
||||
# getting file path
|
||||
file = self.fname.replace("\\", "/")
|
||||
|
||||
# getting data from json file with unicode conversion
|
||||
with open(file, "r") as f:
|
||||
json_f = {self.byteify(key): self.byteify(value)
|
||||
for key, value in json.load(f).iteritems()}
|
||||
|
||||
# get correct order of nodes by positions on track and subtrack
|
||||
nodes_order = self.reorder_nodes(json_f["effects"])
|
||||
|
||||
# adding nodes to node graph
|
||||
# just in case we are in group lets jump out of it
|
||||
nuke.endGroup()
|
||||
|
||||
GN = nuke.createNode("Group")
|
||||
|
||||
GN["name"].setValue(object_name)
|
||||
|
||||
# adding content to the group node
|
||||
with GN:
|
||||
pre_node = nuke.createNode("Input")
|
||||
pre_node["name"].setValue("rgb")
|
||||
|
||||
for ef_name, ef_val in nodes_order.items():
|
||||
node = nuke.createNode(ef_val["class"])
|
||||
for k, v in ef_val["node"].items():
|
||||
if isinstance(v, list) and len(v) > 4:
|
||||
node[k].setAnimated()
|
||||
for i, value in enumerate(v):
|
||||
if isinstance(value, list):
|
||||
for ci, cv in enumerate(value):
|
||||
node[k].setValueAt(
|
||||
cv,
|
||||
(workfile_first_frame + i),
|
||||
ci)
|
||||
else:
|
||||
node[k].setValueAt(
|
||||
value,
|
||||
(workfile_first_frame + i))
|
||||
else:
|
||||
node[k].setValue(v)
|
||||
node.setInput(0, pre_node)
|
||||
pre_node = node
|
||||
|
||||
output = nuke.createNode("Output")
|
||||
output.setInput(0, pre_node)
|
||||
|
||||
# try to find parent read node
|
||||
self.connect_read_node(GN, namespace, json_f["assignTo"])
|
||||
|
||||
GN["tile_color"].setValue(int("0x3469ffff", 16))
|
||||
|
||||
self.log.info("Loaded lut setup: `{}`".format(GN["name"].value()))
|
||||
|
||||
return containerise(
|
||||
node=GN,
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
context=context,
|
||||
loader=self.__class__.__name__,
|
||||
data=data_imprint)
|
||||
|
||||
def update(self, container, representation):
|
||||
"""Update the Loader's path
|
||||
|
||||
Nuke automatically tries to reset some variables when changing
|
||||
the loader's path to a new file. These automatic changes are to its
|
||||
inputs:
|
||||
|
||||
"""
|
||||
|
||||
from avalon.nuke import (
|
||||
update_container
|
||||
)
|
||||
# get main variables
|
||||
# Get version from io
|
||||
version = io.find_one({
|
||||
"type": "version",
|
||||
"_id": representation["parent"]
|
||||
})
|
||||
# get corresponding node
|
||||
GN = nuke.toNode(container['objectName'])
|
||||
|
||||
file = api.get_representation_path(representation).replace("\\", "/")
|
||||
name = container['name']
|
||||
version_data = version.get("data", {})
|
||||
vname = version.get("name", None)
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
workfile_first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
namespace = container['namespace']
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
object_name = "{}_{}".format(name, namespace)
|
||||
|
||||
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
|
||||
"source", "author", "fps"]
|
||||
|
||||
data_imprint = {"representation": str(representation["_id"]),
|
||||
"frameStart": first,
|
||||
"frameEnd": last,
|
||||
"version": vname,
|
||||
"colorspaceInput": colorspace,
|
||||
"objectName": object_name}
|
||||
|
||||
for k in add_keys:
|
||||
data_imprint.update({k: version_data[k]})
|
||||
|
||||
# Update the imprinted representation
|
||||
update_container(
|
||||
GN,
|
||||
data_imprint
|
||||
)
|
||||
|
||||
# getting data from json file with unicode conversion
|
||||
with open(file, "r") as f:
|
||||
json_f = {self.byteify(key): self.byteify(value)
|
||||
for key, value in json.load(f).iteritems()}
|
||||
|
||||
# get correct order of nodes by positions on track and subtrack
|
||||
nodes_order = self.reorder_nodes(json_f["effects"])
|
||||
|
||||
# adding nodes to node graph
|
||||
# just in case we are in group lets jump out of it
|
||||
nuke.endGroup()
|
||||
|
||||
# adding content to the group node
|
||||
with GN:
|
||||
# first remove all nodes
|
||||
[nuke.delete(n) for n in nuke.allNodes()]
|
||||
|
||||
# create input node
|
||||
pre_node = nuke.createNode("Input")
|
||||
pre_node["name"].setValue("rgb")
|
||||
|
||||
for ef_name, ef_val in nodes_order.items():
|
||||
node = nuke.createNode(ef_val["class"])
|
||||
for k, v in ef_val["node"].items():
|
||||
if isinstance(v, list) and len(v) > 3:
|
||||
node[k].setAnimated()
|
||||
for i, value in enumerate(v):
|
||||
if isinstance(value, list):
|
||||
for ci, cv in enumerate(value):
|
||||
node[k].setValueAt(
|
||||
cv,
|
||||
(workfile_first_frame + i),
|
||||
ci)
|
||||
else:
|
||||
node[k].setValueAt(
|
||||
value,
|
||||
(workfile_first_frame + i))
|
||||
else:
|
||||
node[k].setValue(v)
|
||||
node.setInput(0, pre_node)
|
||||
pre_node = node
|
||||
|
||||
# create output node
|
||||
output = nuke.createNode("Output")
|
||||
output.setInput(0, pre_node)
|
||||
|
||||
# try to find parent read node
|
||||
self.connect_read_node(GN, namespace, json_f["assignTo"])
|
||||
|
||||
# get all versions in list
|
||||
versions = io.find({
|
||||
"type": "version",
|
||||
"parent": version["parent"]
|
||||
}).distinct('name')
|
||||
|
||||
max_version = max(versions)
|
||||
|
||||
# change color of node
|
||||
if version.get("name") not in [max_version]:
|
||||
GN["tile_color"].setValue(int("0xd84f20ff", 16))
|
||||
else:
|
||||
GN["tile_color"].setValue(int("0x3469ffff", 16))
|
||||
|
||||
self.log.info("udated to version: {}".format(version.get("name")))
|
||||
|
||||
def connect_read_node(self, group_node, asset, subset):
|
||||
"""
|
||||
Finds read node and selects it
|
||||
|
||||
Arguments:
|
||||
asset (str): asset name
|
||||
|
||||
Returns:
|
||||
nuke node: node is selected
|
||||
None: if nothing found
|
||||
"""
|
||||
search_name = "{0}_{1}".format(asset, subset)
|
||||
node = [n for n in nuke.allNodes() if search_name in n["name"].value()]
|
||||
if len(node) > 0:
|
||||
rn = node[0]
|
||||
else:
|
||||
rn = None
|
||||
|
||||
# Parent read node has been found
|
||||
# solving connections
|
||||
if rn:
|
||||
dep_nodes = rn.dependent()
|
||||
|
||||
if len(dep_nodes) > 0:
|
||||
for dn in dep_nodes:
|
||||
dn.setInput(0, group_node)
|
||||
|
||||
group_node.setInput(0, rn)
|
||||
group_node.autoplace()

    def reorder_nodes(self, data):
        new_order = OrderedDict()
        trackNums = [v["trackIndex"] for k, v in data.items()]
        subTrackNums = [v["subTrackIndex"] for k, v in data.items()]

        for trackIndex in range(
                min(trackNums), max(trackNums) + 1):
            for subTrackIndex in range(
                    min(subTrackNums), max(subTrackNums) + 1):
                item = self.get_item(data, trackIndex, subTrackIndex)
                if item is not {}:
                    new_order.update(item)
        return new_order

    def get_item(self, data, trackIndex, subTrackIndex):
        return {key: val for key, val in data.items()
                if subTrackIndex == val["subTrackIndex"]
                if trackIndex == val["trackIndex"]}

    def byteify(self, input):
        """
        Converts unicode strings to strings
        It goes trought all dictionary

        Arguments:
            input (dict/str): input

        Returns:
            dict: with fixed values and keys

        """

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
                    for key, value in input.iteritems()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        else:
            return input

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        from avalon.nuke import viewer_update_and_undo_stop
        node = nuke.toNode(container['objectName'])
        with viewer_update_and_undo_stop():
            nuke.delete(node)
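A small, Nuke-free sketch of the reorder_nodes()/get_item() helpers above, run on a made-up effects payload; the effect names and indices are illustrative only. Note that `item is not {}` is an identity comparison and is always true, which is harmless here because updating a dict with an empty dict is a no-op.

from collections import OrderedDict

def reorder_nodes(data):
    # same ordering rule as LoadLuts.reorder_nodes: iterate track indexes,
    # then sub-track indexes, and pick the matching effects in that order
    new_order = OrderedDict()
    track_nums = [v["trackIndex"] for v in data.values()]
    sub_track_nums = [v["subTrackIndex"] for v in data.values()]
    for track_index in range(min(track_nums), max(track_nums) + 1):
        for sub_track_index in range(min(sub_track_nums), max(sub_track_nums) + 1):
            new_order.update({
                key: val for key, val in data.items()
                if val["trackIndex"] == track_index
                and val["subTrackIndex"] == sub_track_index
            })
    return new_order

# hypothetical payload, in the shape produced by the nukestudio effect collector
effects = {
    "Grade2": {"class": "Grade", "trackIndex": 1, "subTrackIndex": 1, "node": {}},
    "Grade1": {"class": "Grade", "trackIndex": 0, "subTrackIndex": 0, "node": {}},
    "OCIOColorSpace1": {"class": "OCIOColorSpace", "trackIndex": 0, "subTrackIndex": 1, "node": {}},
}

print(list(reorder_nodes(effects)))
# ['Grade1', 'OCIOColorSpace1', 'Grade2']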
pype/plugins/nuke/load/load_luts_ip.py (new file, 330 lines)

@ -0,0 +1,330 @@
from avalon import api, style, io
import nuke
import json
from collections import OrderedDict
from pype.nuke import lib


class LoadLutsInputProcess(api.Loader):
    """Loading colorspace soft effect exported from nukestudio"""

    representations = ["lutJson"]
    families = ["lut"]

    label = "Load Luts - Input Process"
    order = 0
    icon = "eye"
    color = style.colors.alert

    def load(self, context, name, namespace, data):
        """
        Loading function to get the soft effects to particular read node

        Arguments:
            context (dict): context of version
            name (str): name of the version
            namespace (str): asset name
            data (dict): compulsory attribute > not used

        Returns:
            nuke node: containerised nuke node object
        """
||||
# import dependencies
|
||||
from avalon.nuke import containerise
|
||||
|
||||
# get main variables
|
||||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
vname = version.get("name", None)
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
workfile_first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
namespace = namespace or context['asset']['name']
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
object_name = "{}_{}".format(name, namespace)
|
||||
|
||||
# prepare data for imprinting
|
||||
# add additional metadata from the version to imprint to Avalon knob
|
||||
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
|
||||
"source", "author", "fps"]
|
||||
|
||||
data_imprint = {"frameStart": first,
|
||||
"frameEnd": last,
|
||||
"version": vname,
|
||||
"colorspaceInput": colorspace,
|
||||
"objectName": object_name}
|
||||
|
||||
for k in add_keys:
|
||||
data_imprint.update({k: version_data[k]})
|
||||
|
||||
# getting file path
|
||||
file = self.fname.replace("\\", "/")
|
||||
|
||||
# getting data from json file with unicode conversion
|
||||
with open(file, "r") as f:
|
||||
json_f = {self.byteify(key): self.byteify(value)
|
||||
for key, value in json.load(f).iteritems()}
|
||||
|
||||
# get correct order of nodes by positions on track and subtrack
|
||||
nodes_order = self.reorder_nodes(json_f["effects"])
|
||||
|
||||
# adding nodes to node graph
|
||||
# just in case we are in group lets jump out of it
|
||||
nuke.endGroup()
|
||||
|
||||
GN = nuke.createNode("Group")
|
||||
|
||||
GN["name"].setValue(object_name)
|
||||
|
||||
# adding content to the group node
|
||||
with GN:
|
||||
pre_node = nuke.createNode("Input")
|
||||
pre_node["name"].setValue("rgb")
|
||||
|
||||
for ef_name, ef_val in nodes_order.items():
|
||||
node = nuke.createNode(ef_val["class"])
|
||||
for k, v in ef_val["node"].items():
|
||||
if isinstance(v, list) and len(v) > 4:
|
||||
node[k].setAnimated()
|
||||
for i, value in enumerate(v):
|
||||
if isinstance(value, list):
|
||||
for ci, cv in enumerate(value):
|
||||
node[k].setValueAt(
|
||||
cv,
|
||||
(workfile_first_frame + i),
|
||||
ci)
|
||||
else:
|
||||
node[k].setValueAt(
|
||||
value,
|
||||
(workfile_first_frame + i))
|
||||
else:
|
||||
node[k].setValue(v)
|
||||
node.setInput(0, pre_node)
|
||||
pre_node = node
|
||||
|
||||
output = nuke.createNode("Output")
|
||||
output.setInput(0, pre_node)
|
||||
|
||||
# try to place it under Viewer1
|
||||
if not self.connect_active_viewer(GN):
|
||||
nuke.delete(GN)
|
||||
return
|
||||
|
||||
GN["tile_color"].setValue(int("0x3469ffff", 16))
|
||||
|
||||
self.log.info("Loaded lut setup: `{}`".format(GN["name"].value()))
|
||||
|
||||
return containerise(
|
||||
node=GN,
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
context=context,
|
||||
loader=self.__class__.__name__,
|
||||
data=data_imprint)
|
||||
|
||||
def update(self, container, representation):
|
||||
"""Update the Loader's path
|
||||
|
||||
Nuke automatically tries to reset some variables when changing
|
||||
the loader's path to a new file. These automatic changes are to its
|
||||
inputs:
|
||||
|
||||
"""
|
||||
|
||||
from avalon.nuke import (
|
||||
update_container
|
||||
)
|
||||
# get main variables
|
||||
# Get version from io
|
||||
version = io.find_one({
|
||||
"type": "version",
|
||||
"_id": representation["parent"]
|
||||
})
|
||||
# get corresponding node
|
||||
GN = nuke.toNode(container['objectName'])
|
||||
|
||||
file = api.get_representation_path(representation).replace("\\", "/")
|
||||
name = container['name']
|
||||
version_data = version.get("data", {})
|
||||
vname = version.get("name", None)
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
workfile_first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
namespace = container['namespace']
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
object_name = "{}_{}".format(name, namespace)
|
||||
|
||||
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
|
||||
"source", "author", "fps"]
|
||||
|
||||
data_imprint = {"representation": str(representation["_id"]),
|
||||
"frameStart": first,
|
||||
"frameEnd": last,
|
||||
"version": vname,
|
||||
"colorspaceInput": colorspace,
|
||||
"objectName": object_name}
|
||||
|
||||
for k in add_keys:
|
||||
data_imprint.update({k: version_data[k]})
|
||||
|
||||
# Update the imprinted representation
|
||||
update_container(
|
||||
GN,
|
||||
data_imprint
|
||||
)
|
||||
|
||||
# getting data from json file with unicode conversion
|
||||
with open(file, "r") as f:
|
||||
json_f = {self.byteify(key): self.byteify(value)
|
||||
for key, value in json.load(f).iteritems()}
|
||||
|
||||
# get correct order of nodes by positions on track and subtrack
|
||||
nodes_order = self.reorder_nodes(json_f["effects"])
|
||||
|
||||
# adding nodes to node graph
|
||||
# just in case we are in group lets jump out of it
|
||||
nuke.endGroup()
|
||||
|
||||
# adding content to the group node
|
||||
with GN:
|
||||
# first remove all nodes
|
||||
[nuke.delete(n) for n in nuke.allNodes()]
|
||||
|
||||
# create input node
|
||||
pre_node = nuke.createNode("Input")
|
||||
pre_node["name"].setValue("rgb")
|
||||
|
||||
for ef_name, ef_val in nodes_order.items():
|
||||
node = nuke.createNode(ef_val["class"])
|
||||
for k, v in ef_val["node"].items():
|
||||
if isinstance(v, list) and len(v) > 3:
|
||||
node[k].setAnimated()
|
||||
for i, value in enumerate(v):
|
||||
if isinstance(value, list):
|
||||
for ci, cv in enumerate(value):
|
||||
node[k].setValueAt(
|
||||
cv,
|
||||
(workfile_first_frame + i),
|
||||
ci)
|
||||
else:
|
||||
node[k].setValueAt(
|
||||
value,
|
||||
(workfile_first_frame + i))
|
||||
else:
|
||||
node[k].setValue(v)
|
||||
node.setInput(0, pre_node)
|
||||
pre_node = node
|
||||
|
||||
# create output node
|
||||
output = nuke.createNode("Output")
|
||||
output.setInput(0, pre_node)
|
||||
|
||||
# try to place it under Viewer1
|
||||
if not self.connect_active_viewer(GN):
|
||||
nuke.delete(GN)
|
||||
return
|
||||
|
||||
# get all versions in list
|
||||
versions = io.find({
|
||||
"type": "version",
|
||||
"parent": version["parent"]
|
||||
}).distinct('name')
|
||||
|
||||
max_version = max(versions)
|
||||
|
||||
# change color of node
|
||||
if version.get("name") not in [max_version]:
|
||||
GN["tile_color"].setValue(int("0xd84f20ff", 16))
|
||||
else:
|
||||
GN["tile_color"].setValue(int("0x3469ffff", 16))
|
||||
|
||||
self.log.info("udated to version: {}".format(version.get("name")))
|
    def connect_active_viewer(self, group_node):
        """
        Finds Active viewer and
        place the node under it, also adds
        name of group into Input Process of the viewer

        Arguments:
            group_node (nuke node): nuke group node object

        """
        group_node_name = group_node["name"].value()

        viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
        if len(viewer) > 0:
            viewer = viewer[0]
        else:
            self.log.error("Please create Viewer node before you run this action again")
            return None

        # get coordinates of Viewer1
        xpos = viewer["xpos"].value()
        ypos = viewer["ypos"].value()

        ypos += 150

        viewer["ypos"].setValue(ypos)

        # set coordinates to group node
        group_node["xpos"].setValue(xpos)
        group_node["ypos"].setValue(ypos + 50)

        # add group node name to Viewer Input Process
        viewer["input_process_node"].setValue(group_node_name)

        # put backdrop under
        lib.create_backdrop(label="Input Process", layer=2, nodes=[viewer, group_node], color="0x7c7faaff")

        return True
||||
|
||||
def reorder_nodes(self, data):
|
||||
new_order = OrderedDict()
|
||||
trackNums = [v["trackIndex"] for k, v in data.items()]
|
||||
subTrackNums = [v["subTrackIndex"] for k, v in data.items()]
|
||||
|
||||
for trackIndex in range(
|
||||
min(trackNums), max(trackNums) + 1):
|
||||
for subTrackIndex in range(
|
||||
min(subTrackNums), max(subTrackNums) + 1):
|
||||
item = self.get_item(data, trackIndex, subTrackIndex)
|
||||
if item is not {}:
|
||||
new_order.update(item)
|
||||
return new_order
|
||||
|
||||
def get_item(self, data, trackIndex, subTrackIndex):
|
||||
return {key: val for key, val in data.items()
|
||||
if subTrackIndex == val["subTrackIndex"]
|
||||
if trackIndex == val["trackIndex"]}
|
||||
|
||||
def byteify(self, input):
|
||||
"""
|
||||
Converts unicode strings to strings
|
||||
It goes trought all dictionary
|
||||
|
||||
Arguments:
|
||||
input (dict/str): input
|
||||
|
||||
Returns:
|
||||
dict: with fixed values and keys
|
||||
|
||||
"""
|
||||
|
||||
if isinstance(input, dict):
|
||||
return {self.byteify(key): self.byteify(value)
|
||||
for key, value in input.iteritems()}
|
||||
elif isinstance(input, list):
|
||||
return [self.byteify(element) for element in input]
|
||||
elif isinstance(input, unicode):
|
||||
return input.encode('utf-8')
|
||||
else:
|
||||
return input
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
from avalon.nuke import viewer_update_and_undo_stop
|
||||
node = nuke.toNode(container['objectName'])
|
||||
with viewer_update_and_undo_stop():
|
||||
nuke.delete(node)
@ -101,7 +101,8 @@ class LoadMov(api.Loader):
        handles = version_data.get("handles", None)
        handle_start = version_data.get("handleStart", None)
        handle_end = version_data.get("handleEnd", None)

        repr_cont = context["representation"]["context"]

        # fix handle start and end if none are available
        if not handle_start and not handle_end:
            handle_start = handles

@ -119,9 +120,11 @@ class LoadMov(api.Loader):
        file = self.fname.replace("\\", "/")
        log.info("file: {}\n".format(self.fname))

        read_name = "Read"
        read_name += '_' + context["representation"]["context"]["subset"]
        read_name += '_' + context["representation"]["name"]
        read_name = "Read_{0}_{1}_{2}".format(
            repr_cont["asset"],
            repr_cont["subset"],
            repr_cont["representation"])

        # Create the Loader with the filename path set
        with viewer_update_and_undo_stop():
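A tiny sketch of the new Read node naming, using a made-up representation context.

# hypothetical context, in the shape Avalon stores under representation["context"]
repr_cont = {"asset": "sh010", "subset": "plateMain", "representation": "exr"}

read_name = "Read_{0}_{1}_{2}".format(
    repr_cont["asset"],
    repr_cont["subset"],
    repr_cont["representation"])

print(read_name)  # Read_sh010_plateMain_exr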
@ -76,7 +76,7 @@ class LoadSequence(api.Loader):
|
|||
"""Load image sequence into Nuke"""
|
||||
|
||||
families = ["write", "source", "plate", "render"]
|
||||
representations = ["exr", "dpx"]
|
||||
representations = ["exr", "dpx", "jpg", "jpeg"]
|
||||
|
||||
label = "Load sequence"
|
||||
order = -10
|
||||
|
|
@ -94,29 +94,29 @@ class LoadSequence(api.Loader):
|
|||
|
||||
log.info("version_data: {}\n".format(version_data))
|
||||
|
||||
self.first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
self.handle_start = version_data.get("handleStart", 0)
|
||||
self.handle_start = version_data.get("handleStart", 0)
|
||||
self.handle_end = version_data.get("handleEnd", 0)
|
||||
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
handles = version_data.get("handles", 0)
|
||||
handle_start = version_data.get("handleStart", 0)
|
||||
handle_end = version_data.get("handleEnd", 0)
|
||||
|
||||
# fix handle start and end if none are available
|
||||
if not handle_start and not handle_end:
|
||||
handle_start = handles
|
||||
handle_end = handles
|
||||
|
||||
# # create handles offset
|
||||
# first -= handle_start
|
||||
# last += handle_end
|
||||
|
||||
# Fallback to asset name when namespace is None
|
||||
if namespace is None:
|
||||
namespace = context['asset']['name']
|
||||
|
||||
first -= self.handle_start
|
||||
last += self.handle_end
|
||||
|
||||
file = self.fname.replace("\\", "/")
|
||||
log.info("file: {}\n".format(self.fname))
|
||||
|
||||
read_name = "Read_" + context["representation"]["context"]["subset"]
|
||||
repr_cont = context["representation"]["context"]
|
||||
read_name = "Read_{0}_{1}_{2}".format(
|
||||
repr_cont["asset"],
|
||||
repr_cont["subset"],
|
||||
repr_cont["representation"])
|
||||
|
||||
# Create the Loader with the filename path set
|
||||
with viewer_update_and_undo_stop():
|
||||
|
|
@ -138,7 +138,7 @@ class LoadSequence(api.Loader):
|
|||
r["last"].setValue(int(last))
|
||||
|
||||
# add additional metadata from the version to imprint to Avalon knob
|
||||
add_keys = ["frameStart", "frameEnd", "handles",
|
||||
add_keys = ["frameStart", "frameEnd",
|
||||
"source", "colorspace", "author", "fps", "version",
|
||||
"handleStart", "handleEnd"]
|
||||
|
||||
|
|
@ -147,12 +147,18 @@ class LoadSequence(api.Loader):
|
|||
if k is 'version':
|
||||
data_imprint.update({k: context["version"]['name']})
|
||||
else:
|
||||
data_imprint.update({k: context["version"]['data'].get(k, str(None))})
|
||||
data_imprint.update(
|
||||
{k: context["version"]['data'].get(k, str(None))})
|
||||
|
||||
data_imprint.update({"objectName": read_name})
|
||||
|
||||
r["tile_color"].setValue(int("0x4ecd25ff", 16))
|
||||
|
||||
if version_data.get("retime", None):
|
||||
speed = version_data.get("speed", 1)
|
||||
time_warp_nodes = version_data.get("timewarps", [])
|
||||
self.make_retimes(r, speed, time_warp_nodes)
|
||||
|
||||
return containerise(r,
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
|
|
@ -160,6 +166,34 @@ class LoadSequence(api.Loader):
                            loader=self.__class__.__name__,
                            data=data_imprint)

    def make_retimes(self, node, speed, time_warp_nodes):
        ''' Create all retime and timewarping nodes with coppied animation '''
        if speed != 1:
            rtn = nuke.createNode(
                "Retime",
                "speed {}".format(speed))
            rtn["before"].setValue("continue")
            rtn["after"].setValue("continue")
            rtn["input.first_lock"].setValue(True)
            rtn["input.first"].setValue(
                self.handle_start + self.first_frame
            )

        if time_warp_nodes != []:
            for timewarp in time_warp_nodes:
                twn = nuke.createNode(timewarp["Class"],
                                      "name {}".format(timewarp["name"]))
                if isinstance(timewarp["lookup"], list):
                    # if array for animation
                    twn["lookup"].setAnimated()
                    for i, value in enumerate(timewarp["lookup"]):
                        twn["lookup"].setValueAt(
                            (self.first_frame + i) + value,
                            (self.first_frame + i))
                else:
                    # if static value `int`
                    twn["lookup"].setValue(timewarp["lookup"])

    def switch(self, container, representation):
        self.update(container, representation)
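A short arithmetic sketch (no Nuke required) of what make_retimes() writes into an animated `lookup` knob; the workfile start frame and the offsets are invented.

first_frame = 1001
# hypothetical per-frame lookup offsets, as collected from a TimeWarp node
lookup = [0.0, 0.5, 1.0, 1.5]

# make_retimes() keys lookup at (first_frame + i) with value (first_frame + i) + offset,
# i.e. each workfile frame is remapped to itself plus the collected offset
keys = [((first_frame + i), (first_frame + i) + value) for i, value in enumerate(lookup)]
print(keys)
# [(1001, 1001.0), (1002, 1002.5), (1003, 1004.0), (1004, 1005.5)]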
@ -200,11 +234,12 @@ class LoadSequence(api.Loader):
|
|||
|
||||
version_data = version.get("data", {})
|
||||
|
||||
self.first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
self.handle_start = version_data.get("handleStart", 0)
|
||||
self.handle_end = version_data.get("handleEnd", 0)
|
||||
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
handles = version_data.get("handles", 0)
|
||||
handle_start = version_data.get("handleStart", 0)
|
||||
handle_end = version_data.get("handleEnd", 0)
|
||||
|
||||
if first is None:
|
||||
log.warning("Missing start frame for updated version"
|
||||
|
|
@ -212,14 +247,8 @@ class LoadSequence(api.Loader):
|
|||
"{} ({})".format(node['name'].value(), representation))
|
||||
first = 0
|
||||
|
||||
# fix handle start and end if none are available
|
||||
if not handle_start and not handle_end:
|
||||
handle_start = handles
|
||||
handle_end = handles
|
||||
|
||||
# create handles offset
|
||||
first -= handle_start
|
||||
last += handle_end
|
||||
first -= self.handle_start
|
||||
last += self.handle_end
|
||||
|
||||
# Update the loader's path whilst preserving some values
|
||||
with preserve_trim(node):
|
||||
|
|
@ -241,7 +270,6 @@ class LoadSequence(api.Loader):
|
|||
"version": version.get("name"),
|
||||
"colorspace": version_data.get("colorspace"),
|
||||
"source": version_data.get("source"),
|
||||
"handles": version_data.get("handles"),
|
||||
"handleStart": version_data.get("handleStart"),
|
||||
"handleEnd": version_data.get("handleEnd"),
|
||||
"fps": version_data.get("fps"),
|
||||
|
|
@ -255,6 +283,11 @@ class LoadSequence(api.Loader):
|
|||
else:
|
||||
node["tile_color"].setValue(int("0x4ecd25ff", 16))
|
||||
|
||||
if version_data.get("retime", None):
|
||||
speed = version_data.get("speed", 1)
|
||||
time_warp_nodes = version_data.get("timewarps", [])
|
||||
self.make_retimes(node, speed, time_warp_nodes)
|
||||
|
||||
# Update the imprinted representation
|
||||
update_container(
|
||||
node,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
import nuke
from avalon import api, io
import pyblish.api


@ -19,5 +18,6 @@ class CollectAssetInfo(pyblish.api.ContextPlugin):
        self.log.info("asset_data: {}".format(asset_data))

        context.data['handles'] = int(asset_data["data"].get("handles", 0))
        context.data["handleStart"] = int(asset_data["data"].get("handleStart", 0))
        context.data["handleStart"] = int(asset_data["data"].get(
            "handleStart", 0))
        context.data["handleEnd"] = int(asset_data["data"].get("handleEnd", 0))
@ -101,6 +101,11 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
            "fps": instance.context.data["fps"]
        }

        group_node = [x for x in instance if x.Class() == "Group"][0]
        deadlineChunkSize = 1
        if "deadlineChunkSize" in group_node.knobs():
            deadlineChunkSize = group_node["deadlineChunkSize"].value()

        instance.data.update({
            "versionData": version_data,
            "path": path,

@ -112,6 +117,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
            "frameEnd": last_frame,
            "outputType": output_type,
            "colorspace": node["colorspace"].value(),
            "deadlineChunkSize": deadlineChunkSize
        })

        self.log.debug("instance.data: {}".format(instance.data))
@ -27,9 +27,13 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):

    def process(self, instance):

        # root = nuke.root()
        # node_subset_name = instance.data.get("name", None)
        node = instance[1]
        node = None
        for x in instance:
            if x.Class() == "Write":
                node = x

        if node is None:
            return

        DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
                                           "http://localhost:8082")

@ -80,6 +84,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
                start=int(instance.data["frameStart"]),
                end=int(instance.data["frameEnd"])
            ),
            "ChunkSize": instance.data["deadlineChunkSize"],

            "Comment": comment,
pype/plugins/nuke/publish/validate_write_deadline_tab.py (new file, 42 lines)

@ -0,0 +1,42 @@
import pyblish.api
import pype.nuke.lib


class RepairNukeWriteDeadlineTab(pyblish.api.Action):

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):

        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (result["error"] is not None and result["instance"] is not None
                    and result["instance"] not in failed):
                failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        for instance in instances:
            group_node = [x for x in instance if x.Class() == "Group"][0]
            pype.nuke.lib.add_deadline_tab(group_node)


class ValidateNukeWriteDeadlineTab(pyblish.api.InstancePlugin):
    """Ensure Deadline tab is present and current."""

    order = pyblish.api.ValidatorOrder
    label = "Deadline Tab"
    hosts = ["nuke"]
    optional = True
    families = ["write"]
    actions = [RepairNukeWriteDeadlineTab]

    def process(self, instance):
        group_node = [x for x in instance if x.Class() == "Group"][0]

        msg = "Deadline tab missing on \"{}\"".format(group_node.name())
        assert "Deadline" in group_node.knobs(), msg
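A small, Nuke-free sketch of the repair action's filtering step above, using a made-up results list; the instance names are placeholders for real pyblish instances.

# hypothetical entries, in the shape pyblish stores under context.data["results"]
results = [
    {"error": Exception("missing tab"), "instance": "renderMain"},
    {"error": None, "instance": "renderMask"},
    {"error": Exception("missing tab"), "instance": "renderMain"},
]

failed = []
for result in results:
    if (result["error"] is not None and result["instance"] is not None
            and result["instance"] not in failed):
        failed.append(result["instance"])

print(failed)  # ['renderMain'] - each errored instance is queued for repair only once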
pype/plugins/nukestudio/publish/collect_calculate_retime.py (new file, 121 lines)

@ -0,0 +1,121 @@
from pyblish import api
import hiero
import math


class CollectCalculateRetime(api.InstancePlugin):
    """Calculate Retiming of selected track items."""

    order = api.CollectorOrder + 0.02
    label = "Collect Calculate Retiming"
    hosts = ["nukestudio"]
    families = ['retime']

    def process(self, instance):
|
||||
margin_in = instance.data["retimeMarginIn"]
|
||||
margin_out = instance.data["retimeMarginOut"]
|
||||
self.log.debug("margin_in: '{0}', margin_out: '{1}'".format(margin_in, margin_out))
|
||||
|
||||
handle_start = instance.data["handleStart"]
|
||||
handle_end = instance.data["handleEnd"]
|
||||
|
||||
track_item = instance.data["item"]
|
||||
|
||||
# define basic clip frame range variables
|
||||
timeline_in = int(track_item.timelineIn())
|
||||
timeline_out = int(track_item.timelineOut())
|
||||
source_in = int(track_item.sourceIn())
|
||||
source_out = int(track_item.sourceOut())
|
||||
speed = track_item.playbackSpeed()
|
||||
self.log.debug("_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`,\
|
||||
\n source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n handle_start: `{5}`,\n handle_end: `{6}`".format(
|
||||
timeline_in,
|
||||
timeline_out,
|
||||
source_in,
|
||||
source_out,
|
||||
speed,
|
||||
handle_start,
|
||||
handle_end
|
||||
))
|
||||
|
||||
# loop withing subtrack items
|
||||
source_in_change = 0
|
||||
source_out_change = 0
|
||||
for s_track_item in track_item.linkedItems():
|
||||
if isinstance(s_track_item, hiero.core.EffectTrackItem) \
|
||||
and "TimeWarp" in s_track_item.node().Class():
|
||||
|
||||
# adding timewarp attribute to instance
|
||||
if not instance.data.get("timeWarpNodes", None):
|
||||
instance.data["timeWarpNodes"] = list()
|
||||
|
||||
# ignore item if not enabled
|
||||
if s_track_item.isEnabled():
|
||||
node = s_track_item.node()
|
||||
name = node["name"].value()
|
||||
look_up = node["lookup"].value()
|
||||
animated = node["lookup"].isAnimated()
|
||||
if animated:
|
||||
look_up = [((node["lookup"].getValueAt(i)) - i)
|
||||
for i in range((timeline_in - handle_start), (timeline_out + handle_end) + 1)
|
||||
]
|
||||
# calculate differnce
|
||||
diff_in = (node["lookup"].getValueAt(
|
||||
timeline_in)) - timeline_in
|
||||
diff_out = (node["lookup"].getValueAt(
|
||||
timeline_out)) - timeline_out
|
||||
|
||||
# calculate source
|
||||
source_in_change += diff_in
|
||||
source_out_change += diff_out
|
||||
|
||||
# calculate speed
|
||||
speed_in = (node["lookup"].getValueAt(timeline_in) / (
|
||||
float(timeline_in) * .01)) * .01
|
||||
speed_out = (node["lookup"].getValueAt(timeline_out) / (
|
||||
float(timeline_out) * .01)) * .01
|
||||
|
||||
# calculate handles
|
||||
handle_start = int(
|
||||
math.ceil(
|
||||
(handle_start * speed_in * 1000) / 1000.0)
|
||||
)
|
||||
|
||||
handle_end = int(
|
||||
math.ceil(
|
||||
(handle_end * speed_out * 1000) / 1000.0)
|
||||
)
|
||||
self.log.debug(
|
||||
("diff_in, diff_out", diff_in, diff_out))
|
||||
self.log.debug(
|
||||
("source_in_change, source_out_change", source_in_change, source_out_change))
|
||||
|
||||
instance.data["timeWarpNodes"].append({"Class": "TimeWarp",
|
||||
"name": name,
|
||||
"lookup": look_up})
|
||||
|
||||
self.log.debug((source_in_change, source_out_change))
|
||||
# recalculate handles by the speed
|
||||
handle_start *= speed
|
||||
handle_end *= speed
|
||||
self.log.debug("speed: handle_start: '{0}', handle_end: '{1}'".format(handle_start, handle_end))
|
||||
|
||||
source_in += int(source_in_change)
|
||||
source_out += int(source_out_change * speed)
|
||||
handle_start += (margin_in)
|
||||
handle_end += (margin_out)
|
||||
self.log.debug("margin: handle_start: '{0}', handle_end: '{1}'".format(handle_start, handle_end))
|
||||
|
||||
# add all data to Instance
|
||||
instance.data["sourceIn"] = source_in
|
||||
instance.data["sourceOut"] = source_out
|
||||
instance.data["sourceInH"] = int(source_in - math.ceil(
|
||||
(handle_start * 1000) / 1000.0))
|
||||
instance.data["sourceOutH"] = int(source_out + math.ceil(
|
||||
(handle_end * 1000) / 1000.0))
|
||||
instance.data["speed"] = speed
|
||||
|
||||
self.log.debug("timeWarpNodes: {}".format(instance.data["timeWarpNodes"]))
|
||||
self.log.debug("sourceIn: {}".format(instance.data["sourceIn"]))
|
||||
self.log.debug("sourceOut: {}".format(instance.data["sourceOut"]))
|
||||
self.log.debug("speed: {}".format(instance.data["speed"]))
|
||||
|
|
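A worked example of the handle arithmetic this collector performs, assuming a clip with no TimeWarp sub-track items; all numbers are made up.

import math

# made-up numbers for one retimed clip
speed = 2.0          # track item playback speed
handle_start = 10
handle_end = 10
margin_in = 5
margin_out = 5
source_in = 100
source_out = 200

# handles are scaled by the clip speed, then the tag margins are added on top
handle_start = handle_start * speed + margin_in     # 25.0
handle_end = handle_end * speed + margin_out        # 25.0

# the "with handles" source range is rounded outwards with math.ceil
source_in_h = int(source_in - math.ceil((handle_start * 1000) / 1000.0))   # 75
source_out_h = int(source_out + math.ceil((handle_end * 1000) / 1000.0))   # 225

print(source_in_h, source_out_h)  # 75 225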
@ -20,83 +20,114 @@ class CollectClips(api.ContextPlugin):
|
|||
|
||||
projectdata = context.data["projectData"]
|
||||
version = context.data.get("version", "001")
|
||||
instances_data = []
|
||||
for item in context.data.get("selection", []):
|
||||
# Skip audio track items
|
||||
# Try/Except is to handle items types, like EffectTrackItem
|
||||
try:
|
||||
media_type = "core.Hiero.Python.TrackItem.MediaType.kVideo"
|
||||
if str(item.mediaType()) != media_type:
|
||||
sequence = context.data.get("activeSequence")
|
||||
selection = context.data.get("selection")
|
||||
|
||||
track_effects = dict()
|
||||
|
||||
# collect all trackItems as instances
|
||||
for track_index, video_track in enumerate(sequence.videoTracks()):
|
||||
items = video_track.items()
|
||||
sub_items = video_track.subTrackItems()
|
||||
|
||||
for item in items:
|
||||
# compare with selection or if disabled
|
||||
if item not in selection or not item.isEnabled():
|
||||
continue
|
||||
except:
|
||||
|
||||
# Skip audio track items
|
||||
# Try/Except is to handle items types, like EffectTrackItem
|
||||
try:
|
||||
media_type = "core.Hiero.Python.TrackItem.MediaType.kVideo"
|
||||
if str(item.mediaType()) != media_type:
|
||||
continue
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
asset = item.name()
|
||||
track = item.parent()
|
||||
source = item.source().mediaSource()
|
||||
source_path = source.firstpath()
|
||||
effects = [f for f in item.linkedItems() if f.isEnabled()]
|
||||
|
||||
# If source is *.nk its a comp effect and we need to fetch the
|
||||
# write node output. This should be improved by parsing the script
|
||||
# rather than opening it.
|
||||
if source_path.endswith(".nk"):
|
||||
nuke.scriptOpen(source_path)
|
||||
# There should noly be one.
|
||||
write_node = nuke.allNodes(filter="Write")[0]
|
||||
path = nuke.filename(write_node)
|
||||
|
||||
if "%" in path:
|
||||
# Get start frame from Nuke script and use the item source
|
||||
# in/out, because you can have multiple shots covered with
|
||||
# one nuke script.
|
||||
start_frame = int(nuke.root()["first_frame"].getValue())
|
||||
if write_node["use_limit"].getValue():
|
||||
start_frame = int(write_node["first"].getValue())
|
||||
|
||||
path = path % (start_frame + item.sourceIn())
|
||||
|
||||
source_path = path
|
||||
self.log.debug(
|
||||
"Fetched source path \"{}\" from \"{}\" in "
|
||||
"\"{}\".".format(
|
||||
source_path, write_node.name(), source.firstpath()
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
head, padding, ext = os.path.basename(source_path).split(".")
|
||||
source_first_frame = int(padding)
|
||||
except Exception:
|
||||
source_first_frame = 0
|
||||
|
||||
data = {"name": "{0}_{1}".format(track.name(), item.name()),
|
||||
"item": item,
|
||||
"source": source,
|
||||
"sourcePath": source_path,
|
||||
"track": track.name(),
|
||||
"trackIndex": track_index,
|
||||
"sourceFirst": source_first_frame,
|
||||
"effects": effects,
|
||||
"sourceIn": int(item.sourceIn()),
|
||||
"sourceOut": int(item.sourceOut()),
|
||||
"clipIn": int(item.timelineIn()),
|
||||
"clipOut": int(item.timelineOut()),
|
||||
"asset": asset,
|
||||
"family": "clip",
|
||||
"families": [],
|
||||
"handles": 0,
|
||||
"handleStart": projectdata.get("handles", 0),
|
||||
"handleEnd": projectdata.get("handles", 0),
|
||||
"version": int(version)}
|
||||
|
||||
instance = context.create_instance(**data)
|
||||
|
||||
self.log.info("Created instance: {}".format(instance))
|
||||
self.log.debug(">> effects: {}".format(instance.data["effects"]))
|
||||
|
||||
context.data["assetsShared"][asset] = dict()
|
||||
|
||||
# from now we are collecting only subtrackitems on
|
||||
# track with no video items
|
||||
if len(items) > 0:
|
||||
continue
|
||||
|
||||
track = item.parent()
|
||||
source = item.source().mediaSource()
|
||||
source_path = source.firstpath()
|
||||
# create list in track key
|
||||
# get all subTrackItems and add it to context
|
||||
track_effects[track_index] = list()
|
||||
|
||||
# If source is *.nk its a comp effect and we need to fetch the
|
||||
# write node output. This should be improved by parsing the script
|
||||
# rather than opening it.
|
||||
if source_path.endswith(".nk"):
|
||||
nuke.scriptOpen(source_path)
|
||||
# There should noly be one.
|
||||
write_node = nuke.allNodes(filter="Write")[0]
|
||||
path = nuke.filename(write_node)
|
||||
# collect all subtrack items
|
||||
for sitem in sub_items:
|
||||
# unwrap from tuple >> it is always tuple with one item
|
||||
sitem = sitem[0]
|
||||
# checking if not enabled
|
||||
if not sitem.isEnabled():
|
||||
continue
|
||||
|
||||
if "%" in path:
|
||||
# Get start frame from Nuke script and use the item source
|
||||
# in/out, because you can have multiple shots covered with
|
||||
# one nuke script.
|
||||
start_frame = int(nuke.root()["first_frame"].getValue())
|
||||
if write_node["use_limit"].getValue():
|
||||
start_frame = int(write_node["first"].getValue())
|
||||
track_effects[track_index].append(sitem)
|
||||
|
||||
path = path % (start_frame + item.sourceIn())
|
||||
|
||||
source_path = path
|
||||
self.log.debug(
|
||||
"Fetched source path \"{}\" from \"{}\" in "
|
||||
"\"{}\".".format(
|
||||
source_path, write_node.name(), source.firstpath()
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
head, padding, ext = os.path.basename(source_path).split(".")
|
||||
source_first_frame = int(padding)
|
||||
except:
|
||||
source_first_frame = 0
|
||||
|
||||
instances_data.append(
|
||||
{
|
||||
"name": "{0}_{1}".format(track.name(), item.name()),
|
||||
"item": item,
|
||||
"source": source,
|
||||
"sourcePath": source_path,
|
||||
"track": track.name(),
|
||||
"sourceFirst": source_first_frame,
|
||||
"sourceIn": int(item.sourceIn()),
|
||||
"sourceOut": int(item.sourceOut()),
|
||||
"clipIn": int(item.timelineIn()),
|
||||
"clipOut": int(item.timelineOut())
|
||||
}
|
||||
)
|
||||
|
||||
for data in instances_data:
|
||||
data.update(
|
||||
{
|
||||
"asset": data["item"].name(),
|
||||
"family": "clip",
|
||||
"families": [],
|
||||
"handles": 0,
|
||||
"handleStart": projectdata.get("handles", 0),
|
||||
"handleEnd": projectdata.get("handles", 0),
|
||||
"version": int(version)
|
||||
}
|
||||
)
|
||||
instance = context.create_instance(**data)
|
||||
self.log.debug(
|
||||
"Created instance with data: {}".format(instance.data)
|
||||
)
|
||||
context.data["assetsShared"][data["asset"]] = dict()
|
||||
context.data["trackEffects"] = track_effects
|
||||
self.log.debug(">> sub_track_items: `{}`".format(track_effects))
|
||||
|
|
|
|||
pype/plugins/nukestudio/publish/collect_effects.py (new file, 96 lines)

@ -0,0 +1,96 @@
|
|||
import pyblish.api
|
||||
import re
|
||||
|
||||
|
||||
class CollectVideoTracksLuts(pyblish.api.InstancePlugin):
|
||||
"""Collect video tracks effects into context."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.1015
|
||||
label = "Collect Soft Lut Effects"
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
self.log.debug("Finding soft effect for subset: `{}`".format(instance.data.get("subset")))
|
||||
|
||||
# taking active sequence
|
||||
subset = instance.data["subset"]
|
||||
track_effects = instance.context.data.get("trackEffects", {})
|
||||
track_index = instance.data["trackIndex"]
|
||||
effects = instance.data["effects"]
|
||||
|
||||
# creating context attribute
|
||||
self.effects = {"assignTo": subset, "effects": dict()}
|
||||
|
||||
for sitem in effects:
|
||||
self.add_effect(instance, track_index, sitem)
|
||||
|
||||
for t_index, sitems in track_effects.items():
|
||||
for sitem in sitems:
|
||||
if not t_index > track_index:
|
||||
continue
|
||||
self.log.debug(">> sitem: `{}`".format(sitem))
|
||||
self.add_effect(instance, t_index, sitem)
|
||||
|
||||
if self.effects["effects"]:
|
||||
instance.data["effectTrackItems"] = self.effects
|
||||
|
||||
if len(instance.data.get("effectTrackItems", {}).keys()) > 0:
|
||||
instance.data["families"] += ["lut"]
|
||||
self.log.debug("effects.keys: {}".format(instance.data.get("effectTrackItems", {}).keys()))
|
||||
self.log.debug("effects: {}".format(instance.data.get("effectTrackItems", {})))
|
||||
|
||||
def add_effect(self, instance, track_index, item):
|
||||
track = item.parentTrack().name()
|
||||
# node serialization
|
||||
node = item.node()
|
||||
node_serialized = self.node_serialisation(instance, node)
|
||||
|
||||
# collect timelineIn/Out
|
||||
effect_t_in = int(item.timelineIn())
|
||||
effect_t_out = int(item.timelineOut())
|
||||
|
||||
node_name = item.name()
|
||||
node_class = re.sub(r"\d+", "", node_name)
|
||||
|
||||
self.effects["effects"].update({node_name: {
|
||||
"class": node_class,
|
||||
"timelineIn": effect_t_in,
|
||||
"timelineOut": effect_t_out,
|
||||
"subTrackIndex": item.subTrackIndex(),
|
||||
"trackIndex": track_index,
|
||||
"track": track,
|
||||
"node": node_serialized
|
||||
}})
|
||||
|
||||
def node_serialisation(self, instance, node):
|
||||
node_serialized = {}
|
||||
timeline_in_h = instance.data["clipInH"]
|
||||
timeline_out_h = instance.data["clipOutH"]
|
||||
|
||||
# adding ignoring knob keys
|
||||
_ignoring_keys = ['invert_mask', 'help', 'mask',
|
||||
'xpos', 'ypos', 'layer', 'process_mask', 'channel',
|
||||
'channels', 'maskChannelMask', 'maskChannelInput',
|
||||
'note_font', 'note_font_size', 'unpremult',
|
||||
'postage_stamp_frame', 'maskChannel', 'export_cc',
|
||||
'select_cccid', 'mix', 'version']
|
||||
|
||||
# loop trough all knobs and collect not ignored
|
||||
# and any with any value
|
||||
for knob in node.knobs().keys():
|
||||
# skip nodes in ignore keys
|
||||
if knob in _ignoring_keys:
|
||||
continue
|
||||
|
||||
# get animation if node is animated
|
||||
if node[knob].isAnimated():
|
||||
# grab animation including handles
|
||||
knob_anim = [node[knob].getValueAt(i)
|
||||
for i in range(timeline_in_h, timeline_out_h + 1)]
|
||||
|
||||
node_serialized[knob] = knob_anim
|
||||
else:
|
||||
node_serialized[knob] = node[knob].value()
|
||||
|
||||
return node_serialized
|
||||
|
|
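For orientation, this is roughly the shape of one serialized effect entry that add_effect() and node_serialisation() build up; the knob names and values are hypothetical.

import re

# how the effect "class" is derived from the node name
print(re.sub(r"\d+", "", "Grade1"))  # Grade

# roughly the shape of one entry stored under effects["effects"]
effect_entry = {
    "Grade1": {
        "class": "Grade",
        "timelineIn": 1001,
        "timelineOut": 1050,
        "subTrackIndex": 0,
        "trackIndex": 1,
        "track": "VFX",
        "node": {
            "white": 1.2,                   # static knob -> plain value
            "multiply": [1.0, 1.05, 1.1],   # animated knob -> one value per frame, handles included
        },
    }
}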
@ -1,5 +1,6 @@
import pyblish.api


class CollectClipFrameRanges(pyblish.api.InstancePlugin):
    """Collect all frame range data: source(In,Out), timeline(In,Out), edit_(in, out), f(start, end)"""


@ -15,8 +16,10 @@ class CollectClipFrameRanges(pyblish.api.InstancePlugin):
        handle_start = instance.data["handleStart"]
        handle_end = instance.data["handleEnd"]

        source_in_h = instance.data["sourceIn"] - handle_start
        source_out_h = instance.data["sourceOut"] + handle_end
        source_in_h = instance.data("sourceInH",
                                    instance.data("sourceIn") - handle_start)
        source_out_h = instance.data("sourceOutH",
                                     instance.data("sourceOut") + handle_end)

        timeline_in = instance.data["clipIn"]
        timeline_out = instance.data["clipOut"]
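A small sketch of the fallback logic in the new sourceInH/sourceOutH lookup, using a plain dict with .get() to stand in for the pyblish instance data accessor; the numbers are made up.

# a clip with 10-frame handles and no precomputed retime ranges
data = {"sourceIn": 100, "sourceOut": 200, "handleStart": 10, "handleEnd": 10}

source_in_h = data.get("sourceInH", data["sourceIn"] - data["handleStart"])    # 90
source_out_h = data.get("sourceOutH", data["sourceOut"] + data["handleEnd"])   # 210

# when CollectCalculateRetime has already stored sourceInH/sourceOutH,
# those values win and the plain handle offsets are ignored
data.update({"sourceInH": 75, "sourceOutH": 225})
source_in_h = data.get("sourceInH", data["sourceIn"] - data["handleStart"])    # 75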
|
|||
|
|
@ -38,6 +38,10 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
|
|||
tags = instance.data.get("tags", None)
|
||||
clip = instance.data["item"]
|
||||
asset = instance.data.get("asset")
|
||||
sequence = context.data['activeSequence']
|
||||
width = int(sequence.format().width())
|
||||
height = int(sequence.format().height())
|
||||
pixel_aspect = sequence.format().pixelAspect()
|
||||
|
||||
# build data for inner nukestudio project property
|
||||
data = {
|
||||
|
|
@ -157,6 +161,9 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
|
|||
"asset": asset,
|
||||
"hierarchy": hierarchy,
|
||||
"parents": parents,
|
||||
"width": width,
|
||||
"height": height,
|
||||
"pixelAspect": pixel_aspect,
|
||||
"tasks": instance.data["tasks"]
|
||||
})
|
||||
|
||||
|
|
@ -191,7 +198,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
|
|||
|
||||
def process(self, context):
|
||||
instances = context[:]
|
||||
sequence = context.data['activeSequence']
|
||||
|
||||
# create hierarchyContext attr if context has none
|
||||
|
||||
temp_context = {}
|
||||
|
|
@ -216,6 +223,9 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
|
|||
instance.data["parents"] = s_asset_data["parents"]
|
||||
instance.data["hierarchy"] = s_asset_data["hierarchy"]
|
||||
instance.data["tasks"] = s_asset_data["tasks"]
|
||||
instance.data["width"] = s_asset_data["width"]
|
||||
instance.data["height"] = s_asset_data["height"]
|
||||
instance.data["pixelAspect"] = s_asset_data["pixelAspect"]
|
||||
|
||||
# adding frame start if any on instance
|
||||
start_frame = s_asset_data.get("startingFrame")
|
||||
|
|
@ -265,16 +275,10 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
|
|||
|
||||
# adding SourceResolution if Tag was present
|
||||
if instance.data.get("main"):
|
||||
width = int(sequence.format().width())
|
||||
height = int(sequence.format().height())
|
||||
pixel_aspect = sequence.format().pixelAspect()
|
||||
self.log.info("Sequence Width,Height,PixelAspect are: `{0},{1},{2}`".format(
|
||||
width, height, pixel_aspect))
|
||||
|
||||
in_info['custom_attributes'].update({
|
||||
"resolutionWidth": width,
|
||||
"resolutionHeight": height,
|
||||
"pixelAspect": pixel_aspect
|
||||
"resolutionWidth": instance.data["width"],
|
||||
"resolutionHeight": instance.data["height"],
|
||||
"pixelAspect": instance.data["pixelAspect"]
|
||||
})
|
||||
|
||||
in_info['tasks'] = instance.data['tasks']
|
||||
|
|
|
|||
|
|
@ -66,11 +66,14 @@ class CollectPlates(api.InstancePlugin):
            item = instance.data["item"]
            width = int(item.source().mediaSource().width())
            height = int(item.source().mediaSource().height())
            self.log.info("Source Width and Height are: `{0} x {1}`".format(
                width, height))
            pixel_aspect = int(item.source().mediaSource().pixelAspect())

            self.log.info("Source Width and Height are: `{0} x {1} : {2}`".format(
                width, height, pixel_aspect))
            data.update({
                "width": width,
                "height": height
                "height": height,
                "pixelAspect": pixel_aspect
            })

            self.log.debug("Creating instance with name: {}".format(data["name"]))
|
||||
|
|
@ -123,7 +126,7 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
transfer_data = [
|
||||
"handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart",
|
||||
"frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut",
|
||||
"clipInH", "clipOutH", "asset", "track", "version"
|
||||
"clipInH", "clipOutH", "asset", "track", "version", "width", "height", "pixelAspect"
|
||||
]
|
||||
|
||||
# pass data to version
|
||||
|
|
@ -133,11 +136,11 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
version_data.update({
|
||||
"handles": version_data['handleStart'],
|
||||
"colorspace": item.sourceMediaColourTransform(),
|
||||
"colorspaceScript": instance.context.data["colorspace"],
|
||||
"families": [f for f in families if 'ftrack' not in f],
|
||||
"subset": name,
|
||||
"fps": instance.context.data["fps"]
|
||||
})
|
||||
instance.data["versionData"] = version_data
|
||||
|
||||
try:
|
||||
basename, ext = os.path.splitext(source_file)
|
||||
|
|
@ -156,9 +159,11 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
start_frame = source_first_frame + instance.data["sourceInH"]
|
||||
duration = instance.data["sourceOutH"] - instance.data["sourceInH"]
|
||||
end_frame = start_frame + duration
|
||||
self.log.debug("start_frame: `{}`".format(start_frame))
|
||||
self.log.debug("end_frame: `{}`".format(end_frame))
|
||||
files = [file % i for i in range(start_frame, (end_frame + 1), 1)]
|
||||
except Exception as e:
|
||||
self.log.debug("Exception in file: {}".format(e))
|
||||
self.log.warning("Exception in file: {}".format(e))
|
||||
head, ext = os.path.splitext(source_file)
|
||||
ext = ext[1:]
|
||||
files = source_file
|
||||
|
|
@ -207,16 +212,41 @@ class CollectPlatesData(api.InstancePlugin):
|
|||
thumb_representation)
|
||||
|
||||
# adding representation for plates
|
||||
frame_start = instance.data["frameStart"] - \
|
||||
instance.data["handleStart"]
|
||||
frame_end = instance.data["frameEnd"] + instance.data["handleEnd"]
|
||||
|
||||
# exception for retimes
|
||||
if instance.data.get("retime"):
|
||||
source_in_h = instance.data["sourceInH"]
|
||||
source_in = instance.data["sourceIn"]
|
||||
source_handle_start = source_in_h - source_in
|
||||
frame_start = instance.data["frameStart"] + source_handle_start
|
||||
duration = instance.data["sourceOutH"] - instance.data["sourceInH"]
|
||||
frame_end = frame_start + duration
|
||||
|
||||
plates_representation = {
|
||||
'files': files,
|
||||
'stagingDir': staging_dir,
|
||||
'name': ext,
|
||||
'ext': ext,
|
||||
"frameStart": instance.data["frameStart"] - instance.data["handleStart"],
|
||||
"frameEnd": instance.data["frameEnd"] + instance.data["handleEnd"],
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
}
|
||||
instance.data["representations"].append(plates_representation)
|
||||
|
||||
# deal with retimed clip
|
||||
if instance.data.get("retime"):
|
||||
version_data.update({
|
||||
"retime": True,
|
||||
"speed": instance.data.get("speed", 1),
|
||||
"timewarps": instance.data.get("timeWarpNodes", []),
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
})
|
||||
|
||||
instance.data["versionData"] = version_data
|
||||
|
||||
# testing families
|
||||
family = instance.data["family"]
|
||||
families = instance.data["families"]
|
||||
|
|
|
|||
|
|
@ -14,12 +14,4 @@ class CollectSelection(pyblish.api.ContextPlugin):
        self.log.debug("selection: {}".format(selection))

        # if not selection:
        #     self.log.debug(
        #         "Nothing is selected. Collecting all items from sequence "
        #         "\"{}\"".format(hiero.ui.activeSequence())
        #     )
        #     for track in hiero.ui.activeSequence().items():
        #         selection.extend(track.items())

        context.data["selection"] = selection
|||
|
|
@ -1,7 +1,7 @@
|
|||
from pyblish import api
|
||||
|
||||
|
||||
class CollectShots(api.ContextPlugin):
|
||||
class CollectShots(api.InstancePlugin):
|
||||
"""Collect Shot from Clip."""
|
||||
|
||||
# Run just before CollectClipSubsets
|
||||
|
|
@ -10,39 +10,39 @@ class CollectShots(api.ContextPlugin):
|
|||
hosts = ["nukestudio"]
|
||||
families = ["clip"]
|
||||
|
||||
def process(self, context):
|
||||
for instance in context[:]:
|
||||
# Exclude non-tagged instances.
|
||||
tagged = False
|
||||
for tag in instance.data["tags"]:
|
||||
if tag["name"].lower() == "hierarchy":
|
||||
tagged = True
|
||||
def process(self, instance):
|
||||
self.log.debug(
|
||||
"Skipping \"{}\" because its not tagged with "
|
||||
"\"Hierarchy\"".format(instance))
|
||||
# Exclude non-tagged instances.
|
||||
tagged = False
|
||||
for tag in instance.data["tags"]:
|
||||
if tag["name"].lower() == "hierarchy":
|
||||
tagged = True
|
||||
|
||||
if not tagged:
|
||||
self.log.debug(
|
||||
"Skipping \"{}\" because its not tagged with "
|
||||
"\"Hierarchy\"".format(instance)
|
||||
)
|
||||
continue
|
||||
|
||||
# Collect data.
|
||||
data = {}
|
||||
for key, value in instance.data.iteritems():
|
||||
data[key] = value
|
||||
|
||||
data["family"] = "shot"
|
||||
data["families"] = []
|
||||
|
||||
data["subset"] = data["family"] + "Main"
|
||||
|
||||
data["name"] = data["subset"] + "_" + data["asset"]
|
||||
|
||||
data["label"] = data["asset"] + " - " + data["subset"] + " - tasks: {} - assetbuilds: {}".format(
|
||||
data["tasks"], [x["name"] for x in data.get("assetbuilds", [])]
|
||||
if not tagged:
|
||||
self.log.debug(
|
||||
"Skipping \"{}\" because its not tagged with "
|
||||
"\"Hierarchy\"".format(instance)
|
||||
)
|
||||
return
|
||||
|
||||
# Create instance.
|
||||
self.log.debug("Creating instance with: {}".format(data["name"]))
|
||||
instance.context.create_instance(**data)
|
||||
# Collect data.
|
||||
data = {}
|
||||
for key, value in instance.data.iteritems():
|
||||
data[key] = value
|
||||
|
||||
self.log.debug("_ context: {}".format(context[:]))
|
||||
data["family"] = "shot"
|
||||
data["families"] = []
|
||||
|
||||
data["subset"] = data["family"] + "Main"
|
||||
|
||||
data["name"] = data["subset"] + "_" + data["asset"]
|
||||
|
||||
data["label"] = data["asset"] + " - " + data["subset"] + " - tasks: {} - assetbuilds: {}".format(
|
||||
data["tasks"], [x["name"] for x in data.get("assetbuilds", [])]
|
||||
)
|
||||
|
||||
# Create instance.
|
||||
self.log.debug("Creating instance with: {}".format(data["name"]))
|
||||
instance.context.create_instance(**data)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from pyblish import api
|
||||
|
||||
import os
|
||||
|
||||
class CollectClipTagFrameStart(api.InstancePlugin):
|
||||
"""Collect FrameStart from Tags of selected track items."""
|
||||
|
|
@ -19,8 +19,21 @@ class CollectClipTagFrameStart(api.InstancePlugin):

         # gets only task family tags and collect labels
         if "frameStart" in t_family:
-            t_number = t_metadata.get("tag.number", "")
-            start_frame = int(t_number)
+            t_value = t_metadata.get("tag.value", None)
+
+            # backward compatibility
+            t_number = t_metadata.get("tag.number", None)
+            start_frame = t_number or t_value
+
+            try:
+                start_frame = int(start_frame)
+            except ValueError:
+                if "source" in t_value:
+                    source_first = instance.data["sourceFirst"]
+                    source_in = instance.data["sourceIn"]
+                    handle_start = instance.data["handleStart"]
+                    start_frame = (source_first + source_in) - handle_start

         instance.data["startingFrame"] = start_frame
         self.log.info("Start frame on `{0}` set to `{1}`".format(
             instance, start_frame
|
|
|
|||
32
pype/plugins/nukestudio/publish/collect_tag_retime.py
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
from pyblish import api
|
||||
|
||||
|
||||
class CollectTagRetime(api.InstancePlugin):
|
||||
"""Collect Retiming from Tags of selected track items."""
|
||||
|
||||
order = api.CollectorOrder + 0.014
|
||||
label = "Collect Retiming Tag"
|
||||
hosts = ["nukestudio"]
|
||||
families = ['clip']
|
||||
|
||||
def process(self, instance):
|
||||
# gets tags
|
||||
tags = instance.data["tags"]
|
||||
|
||||
for t in tags:
|
||||
t_metadata = dict(t["metadata"])
|
||||
t_family = t_metadata.get("tag.family", "")
|
||||
|
||||
# gets only task family tags and collect labels
|
||||
if "retiming" in t_family:
|
||||
margin_in = t_metadata.get("tag.marginIn", "")
|
||||
margin_out = t_metadata.get("tag.marginOut", "")
|
||||
|
||||
instance.data["retimeMarginIn"] = int(margin_in)
|
||||
instance.data["retimeMarginOut"] = int(margin_out)
|
||||
instance.data["retime"] = True
|
||||
|
||||
self.log.info("retimeMarginIn: `{}`".format(margin_in))
|
||||
self.log.info("retimeMarginOut: `{}`".format(margin_out))
|
||||
|
||||
instance.data["families"] += ["retime"]
|
||||
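For context, the collector above expects a NukeStudio tag whose metadata carries the retime margins as strings; roughly the following shape goes in, and the listed keys come out on the instance (the values are made up for illustration):

tag_metadata = {
    "tag.family": "retiming",
    "tag.marginIn": "12",
    "tag.marginOut": "8",
}

# keys the collector adds to instance.data
result = {
    "retimeMarginIn": 12,
    "retimeMarginOut": 8,
    "retime": True,  # and "retime" is appended to the instance families
}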
231
pype/plugins/nukestudio/publish/extract_effects.py
Normal file
|
|
@ -0,0 +1,231 @@
|
|||
# from pype import plugins
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import pyblish.api
|
||||
import tempfile
|
||||
from avalon import io, api
|
||||
|
||||
class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
|
||||
"""Collect video tracks effects into context."""
|
||||
|
||||
order = pyblish.api.ExtractorOrder
|
||||
label = "Export Soft Lut Effects"
|
||||
families = ["lut"]
|
||||
|
||||
def process(self, instance):
|
||||
item = instance.data["item"]
|
||||
effects = instance.data.get("effectTrackItems")
|
||||
|
||||
instance.data["families"] = [f for f in instance.data.get("families", []) if f not in ["lut"]]
|
||||
|
||||
self.log.debug("___ instance.data[families]: `{}`".format(instance.data["families"]))
|
||||
|
||||
# skip any without effects
|
||||
if not effects:
|
||||
return
|
||||
|
||||
subset = instance.data.get("subset")
|
||||
subset_split = re.findall(r'[A-Z][^A-Z]*', subset)
|
||||
|
||||
if len(subset_split) > 0:
|
||||
root_name = subset.replace(subset_split[0], "")
|
||||
subset_split.insert(0, root_name.capitalize())
|
||||
|
||||
subset_split.insert(0, "lut")
|
||||
|
||||
self.log.debug("creating staging dir")
|
||||
# staging_dir = self.staging_dir(instance)
|
||||
|
||||
# TODO: only provisory will be replace by function
|
||||
staging_dir = instance.data.get('stagingDir', None)
|
||||
|
||||
if not staging_dir:
|
||||
staging_dir = os.path.normpath(
|
||||
tempfile.mkdtemp(prefix="pyblish_tmp_")
|
||||
)
|
||||
instance.data['stagingDir'] = staging_dir
|
||||
|
||||
self.log.debug("creating staging dir: `{}`".format(staging_dir))
|
||||
|
||||
transfers = list()
|
||||
if "transfers" not in instance.data:
|
||||
instance.data["transfers"] = list()
|
||||
|
||||
name = "".join(subset_split)
|
||||
ext = "json"
|
||||
file = name + "." + ext
|
||||
|
||||
# create new instance and inherit data
|
||||
data = {}
|
||||
for key, value in instance.data.iteritems():
|
||||
data[key] = value
|
||||
|
||||
# change names
|
||||
data["subset"] = name
|
||||
data["family"] = "lut"
|
||||
data["families"] = []
|
||||
data["name"] = data["subset"] + "_" + data["asset"]
|
||||
data["label"] = "{} - {} - ({})".format(
|
||||
data['asset'], data["subset"], os.path.splitext(file)[1]
|
||||
)
|
||||
data["source"] = data["sourcePath"]
|
||||
|
||||
# create new instance
|
||||
instance = instance.context.create_instance(**data)
|
||||
|
||||
dst_dir = self.resource_destination_dir(instance)
|
||||
|
||||
# change paths in effects to files
|
||||
for k, effect in effects["effects"].items():
|
||||
trn = self.copy_linked_files(effect, dst_dir)
|
||||
if trn:
|
||||
transfers.append((trn[0], trn[1]))
|
||||
|
||||
instance.data["transfers"].extend(transfers)
|
||||
self.log.debug("_ transfers: `{}`".format(
|
||||
instance.data["transfers"]))
|
||||
|
||||
# create representations
|
||||
instance.data["representations"] = list()
|
||||
|
||||
transfer_data = [
|
||||
"handleStart", "handleEnd", "sourceIn", "sourceOut",
|
||||
"frameStart", "frameEnd", "sourceInH", "sourceOutH",
|
||||
"clipIn", "clipOut", "clipInH", "clipOutH", "asset", "track",
|
||||
"version"
|
||||
]
|
||||
|
||||
# pass data to version
|
||||
version_data = dict()
|
||||
version_data.update({k: instance.data[k] for k in transfer_data})
|
||||
|
||||
# add to data of representation
|
||||
version_data.update({
|
||||
"handles": version_data['handleStart'],
|
||||
"colorspace": item.sourceMediaColourTransform(),
|
||||
"colorspaceScript": instance.context.data["colorspace"],
|
||||
"families": ["plate", "lut"],
|
||||
"subset": name,
|
||||
"fps": instance.context.data["fps"]
|
||||
})
|
||||
instance.data["versionData"] = version_data
|
||||
|
||||
representation = {
|
||||
'files': file,
|
||||
'stagingDir': staging_dir,
|
||||
'name': "lut" + ext.title(),
|
||||
'ext': ext
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
self.log.debug("_ representations: `{}`".format(
|
||||
instance.data["representations"]))
|
||||
|
||||
self.log.debug("_ version_data: `{}`".format(
|
||||
instance.data["versionData"]))
|
||||
|
||||
with open(os.path.join(staging_dir, file), "w") as outfile:
|
||||
outfile.write(json.dumps(effects, indent=4, sort_keys=True))
|
||||
|
||||
return
|
||||
|
||||
def copy_linked_files(self, effect, dst_dir):
|
||||
for k, v in effect["node"].items():
|
||||
if k in "file" and v is not '':
|
||||
base_name = os.path.basename(v)
|
||||
dst = os.path.join(dst_dir, base_name).replace("\\", "/")
|
||||
|
||||
# add it to the json
|
||||
effect["node"][k] = dst
|
||||
return (v, dst)
|
||||
|
||||
def resource_destination_dir(self, instance):
|
||||
anatomy = instance.context.data['anatomy']
|
||||
self.create_destination_template(instance, anatomy)
|
||||
|
||||
return os.path.join(
|
||||
instance.data["assumedDestination"],
|
||||
"resources"
|
||||
)
|
||||
|
||||
def create_destination_template(self, instance, anatomy):
|
||||
"""Create a filepath based on the current data available
|
||||
|
||||
Example template:
|
||||
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
|
||||
{subset}.{representation}
|
||||
Args:
|
||||
instance: the instance to publish
|
||||
|
||||
Returns:
|
||||
file path (str)
|
||||
"""
|
||||
|
||||
# get all the stuff from the database
|
||||
subset_name = instance.data["subset"]
|
||||
self.log.info(subset_name)
|
||||
asset_name = instance.data["asset"]
|
||||
project_name = api.Session["AVALON_PROJECT"]
|
||||
a_template = anatomy.templates
|
||||
|
||||
project = io.find_one({"type": "project",
|
||||
"name": project_name},
|
||||
projection={"config": True, "data": True})
|
||||
|
||||
template = a_template['publish']['path']
|
||||
# anatomy = instance.context.data['anatomy']
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]})
|
||||
|
||||
assert asset, ("No asset found by the name '{}' "
|
||||
"in project '{}'".format(asset_name, project_name))
|
||||
silo = asset['silo']
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]})
|
||||
|
||||
# assume there is no version yet, we start at `1`
|
||||
version = None
|
||||
version_number = 1
|
||||
if subset is not None:
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
|
||||
# if there is a subset there ought to be version
|
||||
if version is not None:
|
||||
version_number += version["name"]
|
||||
|
||||
if instance.data.get('version'):
|
||||
version_number = int(instance.data.get('version'))
|
||||
|
||||
padding = int(a_template['render']['padding'])
|
||||
|
||||
hierarchy = asset['data']['parents']
|
||||
if hierarchy:
|
||||
# hierarchy = os.path.sep.join(hierarchy)
|
||||
hierarchy = "/".join(hierarchy)
|
||||
|
||||
template_data = {"root": api.Session["AVALON_PROJECTS"],
|
||||
"project": {"name": project_name,
|
||||
"code": project['data']['code']},
|
||||
"silo": silo,
|
||||
"family": instance.data['family'],
|
||||
"asset": asset_name,
|
||||
"subset": subset_name,
|
||||
"frame": ('#' * padding),
|
||||
"version": version_number,
|
||||
"hierarchy": hierarchy,
|
||||
"representation": "TEMP"}
|
||||
|
||||
instance.data["assumedTemplateData"] = template_data
|
||||
self.log.info(template_data)
|
||||
instance.data["template"] = template
|
||||
# We take the parent folder of representation 'filepath'
|
||||
instance.data["assumedDestination"] = os.path.dirname(
|
||||
anatomy.format(template_data)["publish"]["path"]
|
||||
)
|
||||
|
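One detail worth noting in extract_effects.py above is how the lut subset name is built: the clip subset is split on its camel-case boundaries and prefixed with "lut". A small standalone illustration of that naming step, using the same regular expression (the helper name and the example value are illustrative):

import re

def lut_subset_name(subset):
    # "plateMain" -> ["Main"] -> ["Plate", "Main"] -> "lutPlateMain"
    parts = re.findall(r'[A-Z][^A-Z]*', subset)
    if parts:
        root = subset.replace(parts[0], "")
        parts.insert(0, root.capitalize())
    parts.insert(0, "lut")
    return "".join(parts)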
|
@ -124,6 +124,26 @@ class Popup2(Popup):
|
|||
fix = self.widgets["show"]
|
||||
fix.setText("Fix")
|
||||
|
||||
def calculate_window_geometry(self):
|
||||
"""Respond to status changes
|
||||
|
||||
On creation, align window with screen bottom right.
|
||||
|
||||
"""
|
||||
parent_widget = self.parent()
|
||||
|
||||
app = QtWidgets.QApplication.instance()
|
||||
if parent_widget:
|
||||
screen = app.desktop().screenNumber(parent_widget)
|
||||
else:
|
||||
screen = app.desktop().screenNumber(app.desktop().cursor().pos())
|
||||
center_point = app.desktop().screenGeometry(screen).center()
|
||||
|
||||
frame_geo = self.frameGeometry()
|
||||
frame_geo.moveCenter(center_point)
|
||||
|
||||
return frame_geo
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def application():
|
||||
|
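The added calculate_window_geometry picks the screen that hosts the parent widget, or the screen under the cursor when the popup is parentless, and centres the frame geometry on it. A minimal usage sketch with the same Qt.py / QDesktopWidget calls (the plain QWidget stands in for the real Popup2):

from Qt import QtWidgets

app = QtWidgets.QApplication.instance() or QtWidgets.QApplication([])
popup = QtWidgets.QWidget()  # stand-in for the real popup widget

desktop = app.desktop()
screen = desktop.screenNumber(desktop.cursor().pos())
frame_geo = popup.frameGeometry()
frame_geo.moveCenter(desktop.screenGeometry(screen).center())
popup.setGeometry(frame_geo)
popup.show()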
|
|
|||
|
|
@ -1 +0,0 @@
|
|||
<?xml version="1.0" ?><svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g><g><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256"><stop offset="0" style="stop-color:#33B49D"/><stop offset="1" style="stop-color:#00A185"/></linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188"><stop offset="0" style="stop-color:#00A185"/><stop offset="1" style="stop-color:#33B49D"/></linearGradient><path d="M256,469.3338623c-117.6315308,0-213.3333435-95.7023926-213.3333435-213.3333435 c0-117.6314545,95.7018051-213.333313,213.3333435-213.333313c117.6362,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.6362,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/></g><g><rect height="127.4010696" opacity="0.3" width="127.4021301" x="200.2975464" y="200.2986603"/><rect height="35.229332" opacity="0.3" width="10" x="302.7012634" y="144"/><rect height="35.229332" opacity="0.3" width="10" x="273.5652771" y="144"/><rect height="35.229332" opacity="0.3" width="10" x="244.4292755" y="144"/><rect height="35.229332" opacity="0.3" width="10" x="215.2985992" y="144"/><rect height="35.2341347" opacity="0.3" width="10" x="302.7012634" y="348.7658691"/><rect height="35.2341347" opacity="0.3" width="10" x="273.5652771" y="348.7658691"/><rect height="35.2341347" opacity="0.3" width="10" x="244.4292755" y="348.7658691"/><rect height="35.2341347" opacity="0.3" width="10" x="215.2985992" y="348.7658691"/><rect height="10" opacity="0.3" width="35.2292747" x="144" y="215.2986603"/><rect height="10" opacity="0.3" width="35.2292747" x="144" y="244.4341278"/><rect height="10" opacity="0.3" width="35.2292747" x="144" y="273.5653381"/><rect height="10" opacity="0.3" width="35.2292747" x="144" y="302.7013245"/><rect height="10" opacity="0.3" width="35.2347298" x="348.7652588" y="215.2986603"/><rect height="10" opacity="0.3" width="35.2347298" x="348.7652588" y="244.4341278"/><rect height="10" opacity="0.3" width="35.2347298" x="348.7652588" y="273.5653381"/><rect height="10" opacity="0.3" width="35.2347298" x="348.7652588" y="302.7013245"/></g><g><rect fill="#FFFFFF" height="127.4010696" width="127.4021301" x="192.2975464" y="192.2986603"/><rect fill="#FFFFFF" height="35.229332" width="10" x="294.7012634" y="136"/><rect fill="#FFFFFF" height="35.229332" width="10" x="265.5652771" y="136"/><rect fill="#FFFFFF" height="35.229332" width="10" x="236.4292755" y="136"/><rect fill="#FFFFFF" height="35.229332" width="10" x="207.2985992" y="136"/><rect fill="#FFFFFF" height="35.2341347" width="10" x="294.7012634" y="340.7658691"/><rect fill="#FFFFFF" height="35.2341347" width="10" x="265.5652771" y="340.7658691"/><rect fill="#FFFFFF" height="35.2341347" width="10" x="236.4292755" y="340.7658691"/><rect fill="#FFFFFF" height="35.2341347" width="10" x="207.2985992" y="340.7658691"/><rect fill="#FFFFFF" height="10" width="35.2292747" x="136" y="207.2986603"/><rect fill="#FFFFFF" height="10" width="35.2292747" x="136" y="236.4341278"/><rect fill="#FFFFFF" height="10" width="35.2292747" x="136" y="265.5653381"/><rect fill="#FFFFFF" height="10" width="35.2292747" x="136" y="294.7013245"/><rect fill="#FFFFFF" height="10" width="35.2347298" x="340.7652588" y="207.2986603"/><rect fill="#FFFFFF" 
height="10" width="35.2347298" x="340.7652588" y="236.4341278"/><rect fill="#FFFFFF" height="10" width="35.2347298" x="340.7652588" y="265.5653381"/><rect fill="#FFFFFF" height="10" width="35.2347298" x="340.7652588" y="294.7013245"/></g></g></svg>
|
||||
|
Before Width: | Height: | Size: 3.7 KiB |
|
|
@ -1,41 +0,0 @@
|
|||
<?xml version="1.0" ?>
|
||||
<svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g>
|
||||
<g>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
|
||||
<stop offset="0" style="stop-color:#33B49D"/>
|
||||
<stop offset="1" style="stop-color:#008165"/>
|
||||
</linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
|
||||
<stop offset="0" style="stop-color:#008165"/>
|
||||
<stop offset="1" style="stop-color:#33B49D"/>
|
||||
</linearGradient>
|
||||
<path
|
||||
d="
|
||||
M 256,469.3338623
|
||||
c -117.6315308,0-213.3333435-95.7023926-213.3333435-213.3333435
|
||||
c 0-117.6314545,95.7018051 -213.333313,213.3333435 -213.333313
|
||||
c 117.6362,0,213.3333435,95.7018661,213.3333435,213.333313
|
||||
C 469.3333435,373.6314697,373.6362,469.3338623,256,469.3338623
|
||||
z"
|
||||
fill="url(#SVGID_2_)"
|
||||
/>
|
||||
</g>
|
||||
<g>
|
||||
<circle cx="170.0324707" cy="174.964798" opacity="0.3" r="26.0319996"/>
|
||||
<rect height="10" opacity="0.3" width="161.7552032" x="222.2442017" y="169.965332"/>
|
||||
<circle cx="170.0324707" cy="264" opacity="0.3" r="26.0319996"/>
|
||||
<rect height="10" opacity="0.3" width="161.7552032" x="222.2442017" y="259.0010681"/>
|
||||
<circle cx="170.0324707" cy="353.034668" opacity="0.3" r="26.0319996"/>
|
||||
<rect height="10" opacity="0.3" width="161.7552032" x="222.2442017" y="348.0341492"/>
|
||||
</g>
|
||||
<g>
|
||||
<circle cx="162.0324707" cy="166.964798" fill="#FFFFFF" r="26.0319996"/>
|
||||
<rect fill="#FFFFFF" height="10" width="161.7552032" x="214.2442017" y="161.965332"/>
|
||||
<circle cx="162.0324707" cy="256" fill="#FFFFFF" r="26.0319996"/>
|
||||
<rect fill="#FFFFFF" height="10" width="161.7552032" x="214.2442017" y="251.0010681"/>
|
||||
<circle cx="162.0324707" cy="345.034668" fill="#FFFFFF" r="26.0319996"/>
|
||||
<rect fill="#FFFFFF" height="10" width="161.7552032" x="214.2442017" y="340.0341492"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 2.4 KiB |
|
|
@ -1,374 +0,0 @@
|
|||
<?xml version="1.0" ?>
|
||||
<svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g>
|
||||
<g>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
|
||||
<stop offset="0" style="stop-color:#ffffff"/>
|
||||
<stop offset="1" style="stop-color:#aaaaaa"/>
|
||||
</linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
|
||||
<stop offset="0" style="stop-color:#aaaaaa"/>
|
||||
<stop offset="1" style="stop-color:#ffffff"/>
|
||||
</linearGradient>
|
||||
<path d="M256,469.3338623c-117.6314697,0-213.3333435-95.7023926-213.3333435-213.3333435 c0-117.6314545,95.7018661-213.333313,213.3333435-213.333313c117.6357422,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.6357422,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/>
|
||||
</g>
|
||||
<g transform="translate(80 80) scale(0.5 0.5)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M642.761,7.857c27.524-13.874,20.688-7.637,36.396,5.242
|
||||
c21.389,17.531-47.403,113.702-85.56,101.273C597.209,66.095,628.271,15.159,642.761,7.857z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M388.764,16.05c10.221,28.978,21.564,101.561-24.58,98.322
|
||||
C347.148,88.668,329.25,7.829,388.764,16.05z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M303.417,128.713c-9.341,33.239-52.357,26.405-65.547,0
|
||||
C209.087,71.091,320.382,68.365,303.417,128.713z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M519.854,524.039c-24.021-0.559-30.431-18.727-40.963-32.773
|
||||
c-15.603,3.518-20.585,17.65-40.968,16.385c-21.542-2.428-6.449,31.787-32.771,24.582c8.412-27.529-25.482-12.754-40.968-16.387
|
||||
c-12.178,6.941-11.856,26.383-16.386,40.969c-14.608-19.396,16.189-66.479-40.968-57.357c-1.641-31.131-34.453-31.09-32.772-65.547
|
||||
c-50.24,12.574-80.853,44.779-131.094,57.355c26.667-44.338,87.719-54.301,131.094-81.934c-5.64-32.949,7.849-46.771,8.193-73.74
|
||||
c-16.702-18.804-68.042-2.965-98.322-8.193c21.309-16.93,59.1-17.375,98.322-16.386c23.035-10.227,27.083-33.584,16.386-57.354
|
||||
c10.475,0.45,11.347,10.5,24.579,8.193c2.721-32.761-14.605-45.48-8.193-81.933c21.864,5.446,30.003,24.619,40.968,40.965
|
||||
c10.298-3.356,16.748-10.566,16.386-24.579c6.384,1.809,9.526,6.86,8.193,16.386C464.654,198.51,397.173,40.41,495.275,81.598
|
||||
c16.984,7.13,16.283,16.491,16.386,32.775c-3.441,15.665-2.963,35.265-24.576,32.772c-7.125,8.614-11.859,18.676-24.582,32.775
|
||||
c51.397,11.42,94.795,30.834,106.515,81.933c32.949,5.641,46.771-7.848,73.743-8.193c-13.629,21.878-56.536,14.474-73.743,32.772
|
||||
c-1.532,68.441,5.806,95.405-32.777,122.901c1.401,12.258-2.917,30.236,8.198,32.771c-11.423,13.158-40.435,8.727-49.163,24.582
|
||||
C503.696,485.572,523.483,493.105,519.854,524.039z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M560.825,147.145
|
||||
C591.118,171.985,535.943,174.746,560.825,147.145L560.825,147.145z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M749.27,220.884c17.286,2.513,8.66,15.924,0,16.389
|
||||
c-8.66,0.461-23.257,1.407-24.579-8.196C726.069,219.534,731.983,218.372,749.27,220.884z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M708.305,237.273c1.556-9.304,8.222-4.549,8.193,0
|
||||
C714.942,246.574,708.276,241.82,708.305,237.273z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M183.93,327.399C184.764,337.301,172.022,332.436,183.93,327.399
|
||||
L183.93,327.399z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M85.611,335.592c3.008,24.861-26.687,17.007-40.968,24.579
|
||||
C43.455,337.136,64.712,336.541,85.611,335.592z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M642.761,360.171c0.217-9.093,45.956-3.205,65.544-8.193
|
||||
c-2.609,21.727,46.896,10.463,32.771,24.583C726.274,392.045,642.544,369.264,642.761,360.171z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M199.897,398.458c-12.073,32.979-66.239,39.39-68.1-9.286
|
||||
C129.943,340.492,211.973,365.481,199.897,398.458z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M642.761,458.492c17.845,3.156,22.146,16.09,24.579,32.773
|
||||
c31.985-16.23,70.102,20.02,65.544,40.967c-4.559,20.947-37.74,31.988-49.158,8.193c3.703,27.465-35.017,31.943-49.158,32.775
|
||||
c-6.717,0.387-26.334-3.59-24.579-24.58c-24.311-3.002-42.555,0.217-49.163-32.775
|
||||
C554.216,482.852,595.181,450.078,642.761,458.492z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M142.964,491.266
|
||||
C143.796,501.166,131.054,496.301,142.964,491.266L142.964,491.266z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M298.637,507.65c0.464-4.996,1.225-9.697,8.193-8.193
|
||||
C306.366,504.455,305.605,509.154,298.637,507.65z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M290.444,515.846c0.464-4.998,1.225-9.699,8.193-8.195
|
||||
C298.173,512.646,297.413,517.352,290.444,515.846z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M101.997,540.426c-3.196,13.438-27.527,6.361-32.772-8.193
|
||||
C63.98,517.672,105.193,526.988,101.997,540.426z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M290.444,515.846
|
||||
C291.276,525.746,278.534,520.883,290.444,515.846L290.444,515.846z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M519.854,524.039
|
||||
C529.757,523.207,524.891,535.947,519.854,524.039L519.854,524.039z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M3.678,573.201c-3.196-5.688-6.361-5.926,0-16.387
|
||||
s67.646-29.844,57.354,16.387C55.537,602.986,6.874,578.881,3.678,573.201z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M405.152,556.814c2.823-9.891,21.799-6.627,24.579,0
|
||||
C437.16,574.533,399.726,575.799,405.152,556.814z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M667.34,573.201c11.993,7.121,27.775,24.111,24.579,32.771
|
||||
s-21.389,16.602-32.772,0C647.752,589.365,664.765,584.277,667.34,573.201z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M749.27,622.359c5.003,5.24,4.313,7.291,0,8.191
|
||||
c-9.64-1.287-23.257,1.414-24.579-8.191C736.382,619.168,744.278,617.117,749.27,622.359z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M347.798,646.938c14.554,8.66,18.898,32.994,0,32.771
|
||||
C328.899,679.492,333.244,638.277,347.798,646.938z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M577.211,646.938c12.928,14.381,37.62,17.002,40.971,40.965
|
||||
C587.939,690.836,583.939,667.527,577.211,646.938z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M175.736,679.709
|
||||
C174.905,669.807,187.647,674.674,175.736,679.709L175.736,679.709z"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M175.736,679.709c-1,20.854-19.334,40.51-32.772,32.773
|
||||
C129.527,704.756,150.086,675.914,175.736,679.709z"/>
|
||||
|
||||
</g>
|
||||
<g transform="translate(80 70) scale(0.7 0.7)">
|
||||
<path id="Selection"
|
||||
fill="#e5e5e5"
|
||||
d="M 413.00,255.00
|
||||
C 413.00,255.00 408.00,255.00 408.00,255.00
|
||||
408.00,255.00 408.00,439.00 408.00,439.00
|
||||
401.94,437.78 374.70,423.25 373.82,420.55
|
||||
373.14,418.48 374.09,98.55 374.09,98.55
|
||||
374.09,95.17 374.22,92.97 373.36,89.64
|
||||
368.51,70.71 345.00,57.64 328.82,57.27
|
||||
328.82,57.27 249.00,56.00 249.00,56.00
|
||||
249.00,56.00 105.64,57.09 105.64,57.09
|
||||
90.17,57.11 75.87,66.25 69.64,80.82
|
||||
67.20,86.51 64.76,93.81 64.73,100.00
|
||||
64.73,100.00 64.91,397.09 64.91,397.09
|
||||
64.95,422.07 85.64,446.45 110.64,446.73
|
||||
110.64,446.73 214.00,448.00 214.00,448.00
|
||||
214.00,448.00 404.91,446.91 404.91,446.91
|
||||
426.82,447.00 454.59,424.12 454.64,395.64
|
||||
454.64,395.64 454.73,225.45 454.73,225.45
|
||||
454.71,212.87 449.50,203.71 437.09,199.27
|
||||
431.28,197.20 416.91,197.64 412.73,202.82
|
||||
411.27,204.73 408.19,206.11 408.18,208.36
|
||||
408.18,208.36 413.00,255.00 413.00,255.00 Z" />
|
||||
<g>
|
||||
<linearGradient id="fadeGrad" y2="1" x2="0">
|
||||
<stop offset="0.5" stop-color="#e5e5e5" stop-opacity="0"/>
|
||||
<stop offset="1" stop-color="#e5e5e5" stop-opacity="1"/>
|
||||
</linearGradient><path id="grey_fill"
|
||||
|
||||
fill="url(#fadeGrad)"
|
||||
d="M 372.00,204.00
|
||||
C 372.00,204.00 372.00,437.00 372.00,437.00
|
||||
372.00,437.00 408.00,437.00 408.00,437.00
|
||||
408.00,437.00 408.00,204.00 408.00,204.00
|
||||
408.00,204.00 372.00,204.00 372.00,204.00 Z" />
|
||||
</g>
|
||||
<path id="border"
|
||||
fill="#2c3e50"
|
||||
d="M 97.00,56.12
|
||||
C 92.47,57.54 89.03,58.99 85.00,61.35
|
||||
74.99,67.21 66.62,78.52 64.12,90.00
|
||||
64.12,90.00 64.12,112.00 64.12,112.00
|
||||
64.12,112.00 64.12,141.00 64.12,141.00
|
||||
64.12,141.00 64.12,399.00 64.12,399.00
|
||||
64.04,421.84 82.68,447.71 107.00,448.00
|
||||
107.00,448.00 214.00,448.00 214.00,448.00
|
||||
214.00,448.00 407.00,448.00 407.00,448.00
|
||||
432.43,447.96 455.96,423.15 456.00,398.00
|
||||
456.00,398.00 456.00,225.00 456.00,225.00
|
||||
455.97,208.44 444.97,196.00 428.00,196.00
|
||||
422.37,196.00 418.95,196.42 414.01,199.48
|
||||
403.91,205.73 400.02,217.75 400.00,229.00
|
||||
400.00,229.00 400.00,385.00 400.00,385.00
|
||||
400.00,388.14 399.82,392.11 401.31,394.95
|
||||
404.11,400.27 410.69,400.91 414.15,395.87
|
||||
416.23,392.83 415.99,388.53 416.00,385.00
|
||||
416.00,385.00 416.00,227.00 416.00,227.00
|
||||
416.01,223.53 415.73,220.08 417.74,217.04
|
||||
422.00,210.58 432.71,210.16 437.57,216.11
|
||||
440.32,219.47 439.99,222.95 440.00,227.00
|
||||
440.00,227.00 440.00,396.00 440.00,396.00
|
||||
439.99,399.36 440.10,401.68 439.24,405.00
|
||||
435.84,418.21 422.05,431.64 408.00,431.99
|
||||
387.86,432.47 376.09,417.15 376.00,398.00
|
||||
376.00,398.00 376.00,356.00 376.00,356.00
|
||||
376.00,356.00 376.00,98.00 376.00,98.00
|
||||
375.96,73.58 350.92,56.04 328.00,56.12
|
||||
328.00,56.12 175.00,56.12 175.00,56.12
|
||||
175.00,56.12 125.00,56.12 125.00,56.12
|
||||
125.00,56.12 97.00,56.12 97.00,56.12 Z
|
||||
M 371.00,432.00
|
||||
C 371.00,432.00 110.00,432.00 110.00,432.00
|
||||
106.92,432.00 103.99,432.10 101.00,431.20
|
||||
88.05,427.30 80.16,412.89 80.00,400.00
|
||||
80.00,400.00 80.00,355.00 80.00,355.00
|
||||
80.00,355.00 80.00,99.00 80.00,99.00
|
||||
80.01,95.19 79.89,92.73 81.05,89.00
|
||||
82.31,84.96 84.15,82.08 87.09,79.04
|
||||
95.54,70.33 104.02,72.00 115.00,72.00
|
||||
115.00,72.00 137.00,72.00 137.00,72.00
|
||||
137.00,72.00 327.00,72.00 327.00,72.00
|
||||
343.88,72.03 359.91,80.22 360.00,99.00
|
||||
360.00,99.00 360.00,158.00 360.00,158.00
|
||||
360.00,158.00 360.00,329.00 360.00,329.00
|
||||
360.00,329.00 360.00,376.00 360.00,376.00
|
||||
360.00,387.70 359.54,399.52 362.24,411.00
|
||||
364.74,421.66 367.08,423.20 371.00,432.00 Z" />
|
||||
<path id="text"
|
||||
fill="#2c3e50"
|
||||
d="M 183.00,110.64
|
||||
C 179.64,111.42 176.99,112.23 174.00,114.13
|
||||
157.51,124.60 159.97,141.24 160.00,158.00
|
||||
160.01,163.24 160.33,167.99 162.10,173.00
|
||||
166.51,185.43 179.93,193.80 193.00,190.98
|
||||
208.62,187.62 215.82,175.17 216.00,160.00
|
||||
216.23,140.44 218.24,117.01 194.00,110.64
|
||||
190.07,109.75 186.95,109.89 183.00,110.64 Z
|
||||
M 229.10,110.77
|
||||
C 227.21,111.91 226.64,111.99 225.45,114.11
|
||||
223.13,118.24 224.00,132.64 224.00,138.00
|
||||
224.00,138.00 224.00,179.00 224.00,179.00
|
||||
224.05,182.84 224.15,186.73 227.28,189.49
|
||||
230.97,192.75 238.29,192.01 243.00,192.00
|
||||
258.52,191.98 277.71,186.71 274.56,167.00
|
||||
273.97,163.32 273.04,160.18 271.03,157.00
|
||||
269.78,155.02 266.97,152.06 266.51,150.00
|
||||
266.00,147.74 267.57,145.18 268.20,143.00
|
||||
269.04,140.08 269.06,137.01 268.99,134.00
|
||||
268.63,119.33 257.94,113.28 245.00,110.77
|
||||
239.84,109.82 234.22,109.45 229.10,110.77 Z
|
||||
M 320.00,164.00
|
||||
C 319.29,166.34 318.67,168.92 317.28,170.93
|
||||
312.51,177.90 302.17,177.38 298.30,170.93
|
||||
296.06,167.14 296.74,162.14 294.41,159.28
|
||||
291.16,155.30 285.55,156.17 282.74,160.13
|
||||
277.83,167.04 285.08,179.04 290.04,183.96
|
||||
300.83,194.65 316.52,193.80 326.90,182.96
|
||||
329.03,180.72 330.34,178.74 331.72,176.00
|
||||
340.59,158.44 327.85,149.91 313.00,143.42
|
||||
309.43,141.86 296.86,137.96 296.66,133.89
|
||||
296.37,128.22 304.01,124.60 309.00,125.57
|
||||
313.67,126.48 316.77,129.55 318.20,134.00
|
||||
319.62,138.40 318.92,143.38 325.00,144.67
|
||||
334.12,146.59 334.63,135.91 332.78,130.00
|
||||
329.78,120.39 321.94,112.67 312.00,110.52
|
||||
299.03,107.70 285.65,116.60 281.55,129.00
|
||||
280.62,132.69 279.59,137.37 281.55,141.00
|
||||
283.97,146.59 289.75,149.61 295.00,152.25
|
||||
295.00,152.25 320.00,164.00 320.00,164.00 Z
|
||||
M 136.00,128.00
|
||||
C 136.00,128.00 136.00,162.00 136.00,162.00
|
||||
135.99,167.25 136.21,172.61 130.90,175.55
|
||||
120.70,181.19 117.27,168.68 112.90,165.86
|
||||
108.58,163.08 103.38,165.23 101.78,170.02
|
||||
100.74,173.13 102.36,176.34 103.88,179.00
|
||||
108.07,186.34 115.38,191.75 124.00,191.98
|
||||
127.40,192.08 130.76,192.02 134.00,190.89
|
||||
137.66,189.62 140.24,187.67 142.96,184.96
|
||||
154.71,173.20 152.00,155.19 152.00,140.00
|
||||
152.00,140.00 152.00,127.00 152.00,127.00
|
||||
151.98,116.47 150.64,112.14 139.00,112.00
|
||||
134.77,111.95 127.63,111.45 124.13,113.85
|
||||
119.09,117.31 119.73,123.89 125.05,126.69
|
||||
128.06,128.27 132.65,128.00 136.00,128.00 Z
|
||||
M 186.00,125.32
|
||||
C 190.79,125.04 195.35,125.73 198.26,130.06
|
||||
200.13,132.83 199.98,135.82 200.00,139.00
|
||||
200.00,139.00 200.00,164.00 200.00,164.00
|
||||
199.92,169.87 198.61,174.57 192.00,175.88
|
||||
192.00,175.88 187.00,175.88 187.00,175.88
|
||||
177.07,175.73 176.01,170.43 176.00,162.00
|
||||
176.00,162.00 176.00,148.00 176.00,148.00
|
||||
176.00,143.84 175.41,134.40 177.17,131.02
|
||||
179.19,127.16 182.05,126.16 186.00,125.32 Z
|
||||
M 240.00,144.00
|
||||
C 240.00,144.00 240.00,125.00 240.00,125.00
|
||||
243.48,125.70 247.87,126.53 250.66,128.85
|
||||
255.54,132.93 254.41,138.65 248.98,141.55
|
||||
246.56,142.85 242.73,143.48 240.00,144.00 Z
|
||||
M 240.00,177.00
|
||||
C 240.00,177.00 240.00,160.00 240.00,160.00
|
||||
243.26,160.15 253.37,160.99 255.86,162.45
|
||||
260.04,164.92 261.00,171.96 256.77,174.91
|
||||
252.86,177.64 244.73,177.00 240.00,177.00 Z
|
||||
M 119.00,216.57
|
||||
C 113.27,218.89 109.82,224.65 115.28,229.49
|
||||
117.99,231.89 121.59,231.95 125.00,232.00
|
||||
125.00,232.00 161.00,232.00 161.00,232.00
|
||||
164.53,231.99 168.83,232.23 171.87,230.15
|
||||
176.46,227.00 176.46,221.00 171.87,217.85
|
||||
169.27,216.07 166.02,216.04 163.00,216.00
|
||||
163.00,216.00 119.00,216.57 119.00,216.57 Z
|
||||
M 239.00,216.57
|
||||
C 232.65,219.14 229.74,225.77 236.13,230.15
|
||||
239.17,232.23 243.47,231.99 247.00,232.00
|
||||
247.00,232.00 289.00,232.00 289.00,232.00
|
||||
292.53,231.99 296.83,232.23 299.87,230.15
|
||||
304.46,227.00 304.46,221.00 299.87,217.85
|
||||
297.03,215.91 293.29,216.02 290.00,216.00
|
||||
290.00,216.00 239.00,216.57 239.00,216.57 Z
|
||||
M 119.00,240.57
|
||||
C 112.65,243.14 109.74,249.77 116.13,254.15
|
||||
119.17,256.23 123.47,255.99 127.00,256.00
|
||||
127.00,256.00 193.00,256.00 193.00,256.00
|
||||
196.14,256.00 200.11,256.18 202.95,254.69
|
||||
208.27,251.89 208.91,245.31 203.87,241.85
|
||||
200.83,239.77 196.53,240.01 193.00,240.00
|
||||
193.00,240.00 145.00,240.00 145.00,240.00
|
||||
145.00,240.00 119.00,240.57 119.00,240.57 Z
|
||||
M 239.00,240.57
|
||||
C 232.65,243.14 229.74,249.77 236.13,254.15
|
||||
239.17,256.23 243.47,255.99 247.00,256.00
|
||||
247.00,256.00 313.00,256.00 313.00,256.00
|
||||
316.53,255.99 320.83,256.23 323.87,254.15
|
||||
328.91,250.69 328.27,244.11 322.95,241.31
|
||||
320.11,239.82 316.14,240.00 313.00,240.00
|
||||
313.00,240.00 265.00,240.00 265.00,240.00
|
||||
265.00,240.00 239.00,240.57 239.00,240.57 Z
|
||||
M 119.00,264.57
|
||||
C 112.65,267.14 109.74,273.77 116.13,278.15
|
||||
119.17,280.23 123.47,279.99 127.00,280.00
|
||||
127.00,280.00 185.00,280.00 185.00,280.00
|
||||
188.14,280.00 192.11,280.18 194.95,278.69
|
||||
200.27,275.89 200.91,269.31 195.87,265.85
|
||||
192.83,263.77 188.53,264.01 185.00,264.00
|
||||
185.00,264.00 143.00,264.00 143.00,264.00
|
||||
136.67,264.00 124.68,263.54 119.00,264.57 Z
|
||||
M 239.00,264.57
|
||||
C 232.65,267.14 229.74,273.77 236.13,278.15
|
||||
239.17,280.23 243.47,279.99 247.00,280.00
|
||||
247.00,280.00 305.00,280.00 305.00,280.00
|
||||
308.14,280.00 312.11,280.18 314.95,278.69
|
||||
320.27,275.89 320.91,269.31 315.87,265.85
|
||||
312.83,263.77 308.53,264.01 305.00,264.00
|
||||
305.00,264.00 263.00,264.00 263.00,264.00
|
||||
256.67,264.00 244.68,263.54 239.00,264.57 Z
|
||||
M 239.00,288.57
|
||||
C 232.65,291.14 229.74,297.77 236.13,302.15
|
||||
239.17,304.23 243.47,303.99 247.00,304.00
|
||||
247.00,304.00 313.00,304.00 313.00,304.00
|
||||
316.14,304.00 320.11,304.18 322.95,302.69
|
||||
328.27,299.89 328.91,293.31 323.87,289.85
|
||||
320.83,287.77 316.53,288.01 313.00,288.00
|
||||
313.00,288.00 265.00,288.00 265.00,288.00
|
||||
265.00,288.00 239.00,288.57 239.00,288.57 Z
|
||||
M 119.00,312.57
|
||||
C 112.65,315.14 109.74,321.77 116.13,326.15
|
||||
119.17,328.23 123.47,327.99 127.00,328.00
|
||||
127.00,328.00 169.00,328.00 169.00,328.00
|
||||
172.53,327.99 176.83,328.23 179.87,326.15
|
||||
184.46,323.00 184.46,317.00 179.87,313.85
|
||||
177.03,311.91 173.29,312.02 170.00,312.00
|
||||
170.00,312.00 119.00,312.57 119.00,312.57 Z
|
||||
M 119.00,336.57
|
||||
C 112.65,339.14 109.74,345.77 116.13,350.15
|
||||
119.17,352.23 123.47,351.99 127.00,352.00
|
||||
127.00,352.00 193.00,352.00 193.00,352.00
|
||||
196.14,352.00 200.11,352.18 202.95,350.69
|
||||
208.27,347.89 208.91,341.31 203.87,337.85
|
||||
200.83,335.77 196.53,336.01 193.00,336.00
|
||||
193.00,336.00 145.00,336.00 145.00,336.00
|
||||
145.00,336.00 119.00,336.57 119.00,336.57 Z
|
||||
M 239.00,336.57
|
||||
C 233.27,338.89 229.82,344.65 235.28,349.49
|
||||
237.99,351.89 241.59,351.95 245.00,352.00
|
||||
245.00,352.00 281.00,352.00 281.00,352.00
|
||||
284.53,351.99 288.83,352.23 291.87,350.15
|
||||
296.33,347.09 296.33,340.91 291.87,337.85
|
||||
289.27,336.07 286.02,336.04 283.00,336.00
|
||||
283.00,336.00 239.00,336.57 239.00,336.57 Z
|
||||
M 119.00,360.57
|
||||
C 112.65,363.14 109.74,369.77 116.13,374.15
|
||||
119.17,376.23 123.47,375.99 127.00,376.00
|
||||
127.00,376.00 185.00,376.00 185.00,376.00
|
||||
188.14,376.00 192.11,376.18 194.95,374.69
|
||||
200.27,371.89 200.91,365.31 195.87,361.85
|
||||
192.83,359.77 188.53,360.01 185.00,360.00
|
||||
185.00,360.00 143.00,360.00 143.00,360.00
|
||||
136.67,360.00 124.68,359.54 119.00,360.57 Z
|
||||
M 239.00,360.57
|
||||
C 232.65,363.14 229.74,369.77 236.13,374.15
|
||||
239.17,376.23 243.47,375.99 247.00,376.00
|
||||
247.00,376.00 313.00,376.00 313.00,376.00
|
||||
316.14,376.00 320.11,376.18 322.95,374.69
|
||||
328.27,371.89 328.91,365.31 323.87,361.85
|
||||
320.83,359.77 316.53,360.01 313.00,360.00
|
||||
313.00,360.00 265.00,360.00 265.00,360.00
|
||||
265.00,360.00 239.00,360.57 239.00,360.57 Z
|
||||
M 119.00,384.57
|
||||
C 112.65,387.14 109.74,393.77 116.13,398.15
|
||||
119.17,400.23 123.47,399.99 127.00,400.00
|
||||
127.00,400.00 193.00,400.00 193.00,400.00
|
||||
196.14,400.00 200.11,400.18 202.95,398.69
|
||||
208.27,395.89 208.91,389.31 203.87,385.85
|
||||
200.83,383.77 196.53,384.01 193.00,384.00
|
||||
193.00,384.00 145.00,384.00 145.00,384.00
|
||||
145.00,384.00 119.00,384.57 119.00,384.57 Z
|
||||
M 239.00,384.57
|
||||
C 232.65,387.14 229.74,393.77 236.13,398.15
|
||||
239.17,400.23 243.47,399.99 247.00,400.00
|
||||
247.00,400.00 305.00,400.00 305.00,400.00
|
||||
308.53,399.99 312.83,400.23 315.87,398.15
|
||||
320.91,394.69 320.27,388.11 314.95,385.31
|
||||
312.11,383.82 308.14,384.00 305.00,384.00
|
||||
305.00,384.00 263.00,384.00 263.00,384.00
|
||||
256.67,384.00 244.68,383.54 239.00,384.57 Z" />
|
||||
</g>
|
||||
<g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" fill="#9C141C" d="M126.579,753.453c-7.296,8.654-15.924,8.414-24.582,0
|
||||
c-8.657-8.416-11.982-46.715,16.386-40.971C139.233,713.313,133.872,744.797,126.579,753.453z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 23 KiB |
88
res/ftrack/action_icons/PrepareProject.svg
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
<?xml version="1.0" ?>
|
||||
<svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g>
|
||||
<g>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
|
||||
<stop offset="0" style="stop-color:#A364AB"/>
|
||||
<stop offset="1" style="stop-color:#8C3D96"/>
|
||||
</linearGradient>
|
||||
<circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
|
||||
<stop offset="0" style="stop-color:#8C3D96"/>
|
||||
<stop offset="1" style="stop-color:#A364AB"/>
|
||||
</linearGradient>
|
||||
<path d="M256,469.3338623c-117.6315155,0-213.3333435-95.7018738-213.3333435-213.3333435 S138.3684845,42.6671982,256,42.6671982c117.6356812,0,213.3333435,95.7018661,213.3333435,213.333313 S373.6356812,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/>
|
||||
</g>
|
||||
|
||||
<path id="Shadow"
|
||||
opacity="0.3"
|
||||
d="M 320.92,166.77
|
||||
C 320.92,166.77 282.98,166.77 282.98,166.77
|
||||
282.98,166.77 282.98,258.00 282.98,258.00
|
||||
282.98,258.00 236.32,258.00 236.32,258.00
|
||||
236.30,258.09 236.35,268.00 236.32,268.00
|
||||
236.32,268.00 282.98,268.00 282.98,268.00
|
||||
282.98,268.00 282.98,359.23 282.98,359.23
|
||||
282.98,359.23 320.92,359.23 320.92,359.23
|
||||
320.92,359.23 320.92,380.76 320.92,380.76
|
||||
320.92,380.76 374.00,380.76 374.00,380.76
|
||||
374.00,380.76 374.00,327.69 374.00,327.69
|
||||
374.00,327.69 320.92,327.69 320.92,327.69
|
||||
320.92,327.69 320.92,349.23 320.92,349.23
|
||||
320.92,349.23 292.98,349.23 292.98,349.23
|
||||
292.98,349.23 292.98,268.00 292.98,268.00
|
||||
292.98,268.00 320.92,268.00 320.92,268.00
|
||||
320.92,268.00 320.92,289.54 320.92,289.54
|
||||
320.92,289.54 374.00,289.54 374.00,289.54
|
||||
374.00,289.54 374.00,236.46 374.00,236.46
|
||||
374.00,236.46 320.92,236.46 320.92,236.46
|
||||
320.92,236.46 320.92,258.00 320.92,258.00
|
||||
320.92,258.00 292.98,258.00 292.98,258.00
|
||||
292.98,258.00 292.98,176.77 292.98,176.77
|
||||
292.98,176.77 320.92,176.77 320.92,176.77
|
||||
320.92,176.77 320.92,198.31 320.92,198.31
|
||||
320.92,198.31 374.00,198.31 374.00,198.31
|
||||
374.00,198.31 374.00,145.24 374.00,145.24
|
||||
374.00,145.24 320.92,145.24 320.92,145.24
|
||||
320.92,145.24 320.92,166.77 320.92,166.77 Z"
|
||||
/>
|
||||
<path
|
||||
id="Ico"
|
||||
fill="#fff"
|
||||
d="M 312.92,159.77
|
||||
C 312.92,159.77 274.98,159.77 274.98,159.77
|
||||
274.98,159.77 274.98,251.00 274.98,251.00
|
||||
274.98,251.00 228.32,251.00 228.32,251.00
|
||||
228.38,251.00 228.38,260.94 228.32,261.00
|
||||
228.32,261.00 274.98,261.00 274.98,261.00
|
||||
274.98,261.00 274.98,352.23 274.98,352.23
|
||||
274.98,352.23 312.92,352.23 312.92,352.23
|
||||
312.92,352.23 312.92,373.76 312.92,373.76
|
||||
312.92,373.76 366.00,373.76 366.00,373.76
|
||||
366.00,373.76 366.00,320.69 366.00,320.69
|
||||
366.00,320.69 312.92,320.69 312.92,320.69
|
||||
312.92,320.69 312.92,342.23 312.92,342.23
|
||||
312.92,342.23 284.98,342.23 284.98,342.23
|
||||
284.98,342.23 284.98,261.00 284.98,261.00
|
||||
284.98,261.00 312.92,261.00 312.92,261.00
|
||||
312.92,261.00 312.92,282.54 312.92,282.54
|
||||
312.92,282.54 366.00,282.54 366.00,282.54
|
||||
366.00,282.54 366.00,229.46 366.00,229.46
|
||||
366.00,229.46 312.92,229.46 312.92,229.46
|
||||
312.92,229.46 312.92,251.00 312.92,251.00
|
||||
312.92,251.00 284.98,251.00 284.98,251.00
|
||||
284.98,251.00 284.98,169.77 284.98,169.77
|
||||
284.98,169.77 312.92,169.77 312.92,169.77
|
||||
312.92,169.77 312.92,191.31 312.92,191.31
|
||||
312.92,191.31 366.00,191.31 366.00,191.31
|
||||
366.00,191.31 366.00,138.24 366.00,138.24
|
||||
366.00,138.24 312.92,138.24 312.92,138.24
|
||||
312.92,138.24 312.92,159.77 312.92,159.77 Z"
|
||||
/>
|
||||
<g transform="translate(78.00 56.00) scale(0.6) rotate(22)">
|
||||
<path
|
||||
d="M383.9994202,278.4917297v-28.9818573l-26.3045349-4.4810791 c-2.4773254-12.3055878-7.3322144-23.741333-14.0432129-33.8426666l15.4474792-21.79039l-20.4943848-20.4949341 l-21.7909546,15.447998c-10.1013184-6.7114716-21.5359802-11.5664063-33.8426514-14.0442657L278.4906616,144h-28.9829712 l-4.4794769,26.3045349c-12.3061218,2.4778595-23.7423859,7.3327942-33.8437195,14.0442657l-21.7909393-15.447998 l-20.493866,20.4949341l15.447998,21.79039c-6.7109375,10.1013336-11.5658722,21.5370789-14.0442657,33.8426666 l-26.3029327,4.4810791v28.9818573l26.3029327,4.480011c2.4783936,12.3061218,7.3333282,23.7429199,14.0442657,33.8426514 l-15.447998,21.7920227l20.493866,20.4933167l21.7925262-15.447998 c10.0997467,6.7120056,21.5360107,11.5664063,33.8421326,14.044281L249.5076904,384h28.9829712l4.4804993-26.3039856 c12.3066711-2.4778748,23.741333-7.3322754,33.8426514-14.044281l21.7909546,15.447998l20.4943848-20.4933167 l-15.4474792-21.7920227c6.7109985-10.0997314,11.5658875-21.5365295,14.0432129-33.8426514L383.9994202,278.4917297z M264,313.1525269c-27.1466675,0-49.1531067-22.0053406-49.1531067-49.1525269 c0-27.1461334,22.0064392-49.1519928,49.1531067-49.1519928c27.1460876,0,49.1524963,22.0058594,49.1524963,49.1519928 C313.1524963,291.1471863,291.1460876,313.1525269,264,313.1525269z" opacity="0.3"/><path d="M375.9994202,270.4917297v-28.9818573l-26.3045349-4.4810791 c-2.4773254-12.3055878-7.3322144-23.741333-14.0432129-33.8426666l15.4474792-21.79039l-20.4943848-20.4949341 l-21.7909546,15.447998c-10.1013184-6.7114716-21.5359802-11.5664063-33.8426514-14.0442657L270.4906616,136h-28.9829712 l-4.4794617,26.3045349c-12.3061371,2.4778595-23.7424011,7.3327942-33.8437347,14.0442657l-21.7909393-15.447998 l-20.493866,20.4949341l15.447998,21.79039c-6.7109375,10.1013336-11.5658569,21.5370789-14.0442657,33.8426666 l-26.3029327,4.4810791v28.9818573l26.3029327,4.480011c2.4784088,12.3061218,7.3333282,23.7429199,14.0442657,33.8426514 l-15.447998,21.7920227l20.493866,20.4933167l21.7925262-15.447998 c10.0997467,6.7120056,21.5360107,11.5664063,33.8421478,14.044281L241.5076904,376h28.9829712l4.4804993-26.3039856 c12.3066711-2.4778748,23.741333-7.3322754,33.8426514-14.044281l21.7909546,15.447998l20.4943848-20.4933167 l-15.4474792-21.7920227c6.7109985-10.0997314,11.5658875-21.5365295,14.0432129-33.8426514L375.9994202,270.4917297z M256,305.1525269c-27.1466675,0-49.1531067-22.0053406-49.1531067-49.1525269 c0-27.1461334,22.0064392-49.1520081,49.1531067-49.1520081c27.1460876,0,49.1524963,22.0058746,49.1524963,49.1520081 C305.1524963,283.1471863,283.1460876,305.1525269,256,305.1525269z" fill="#FFFFFF"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 7.4 KiB |
173
res/ftrack/action_icons/PypeAdmin.svg
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 512 512"
|
||||
>
|
||||
<g>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
|
||||
<stop offset="0" style="stop-color:#33B49D"/>
|
||||
<stop offset="1" style="stop-color:#008165"/>
|
||||
</linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
|
||||
<stop offset="0" style="stop-color:#008165"/>
|
||||
<stop offset="1" style="stop-color:#33B49D"/>
|
||||
</linearGradient>
|
||||
<path
|
||||
d="
|
||||
M 256,469.3338623
|
||||
c -117.6315308,0-213.3333435-95.7023926-213.3333435-213.3333435
|
||||
c 0-117.6314545,95.7018051 -213.333313,213.3333435 -213.333313
|
||||
c 117.6362,0,213.3333435,95.7018661,213.3333435,213.333313
|
||||
C 469.3333435,373.6314697,373.6362,469.3338623,256,469.3338623
|
||||
z"
|
||||
fill="url(#SVGID_2_)"
|
||||
/>
|
||||
</g>
|
||||
<g transform="translate(114.00 60.00) scale(0.36)">
|
||||
<path id="Selection"
|
||||
fill="#fff"
|
||||
d="
|
||||
M 458.00,960.00
|
||||
C 450.88,953.38 442.00,950.89 432.43,939.99
|
||||
424.10,930.49 415.98,914.44 409.70,903.00
|
||||
409.70,903.00 387.22,863.00 387.22,863.00
|
||||
379.45,848.37 368.40,834.14 368.01,817.00
|
||||
367.68,802.29 373.94,787.57 384.04,777.00
|
||||
388.72,772.10 392.22,768.68 398.00,765.06
|
||||
401.36,762.96 406.75,760.75 408.23,756.91
|
||||
409.23,754.39 408.04,748.24 408.23,745.00
|
||||
408.23,745.00 408.23,728.00 408.23,728.00
|
||||
408.77,725.44 409.86,719.43 408.81,717.21
|
||||
407.54,714.52 394.42,706.83 391.00,703.82
|
||||
378.11,692.49 369.21,677.53 369.00,660.00
|
||||
368.74,637.82 378.18,625.54 388.72,607.00
|
||||
388.72,607.00 404.58,579.00 404.58,579.00
|
||||
419.04,553.47 430.38,524.76 461.00,515.03
|
||||
466.51,513.28 473.21,511.81 479.00,512.04
|
||||
487.55,512.37 497.34,515.55 505.00,519.26
|
||||
508.57,520.99 516.36,525.59 520.00,525.45
|
||||
524.42,525.28 536.68,517.19 541.00,514.60
|
||||
550.83,508.70 554.24,508.66 553.99,498.00
|
||||
553.92,495.14 553.75,492.58 552.15,490.09
|
||||
549.35,485.74 533.16,475.67 528.00,472.60
|
||||
506.88,460.04 485.49,452.03 462.00,445.29
|
||||
452.70,442.62 438.39,438.43 429.00,438.00
|
||||
429.00,438.00 429.00,417.00 429.00,417.00
|
||||
429.01,414.41 428.81,411.37 430.02,409.02
|
||||
432.91,403.42 446.25,397.85 452.00,394.69
|
||||
464.79,387.66 485.94,369.70 495.91,359.00
|
||||
527.77,324.81 548.67,279.79 552.09,233.00
|
||||
552.09,233.00 553.00,223.00 553.00,223.00
|
||||
553.00,223.00 553.00,205.00 553.00,205.00
|
||||
552.99,196.67 551.62,188.21 550.25,180.00
|
||||
542.09,131.16 517.05,84.54 479.00,52.44
|
||||
457.48,34.29 433.59,21.86 407.00,13.00
|
||||
391.73,7.91 364.92,2.02 349.00,2.00
|
||||
349.00,2.00 327.00,2.00 327.00,2.00
|
||||
306.93,2.03 274.74,10.01 256.00,17.45
|
||||
190.76,43.34 140.60,101.93 127.00,171.00
|
||||
125.43,178.97 123.04,195.15 123.00,203.00
|
||||
123.00,203.00 123.00,221.00 123.00,221.00
|
||||
123.00,221.00 124.87,242.00 124.87,242.00
|
||||
130.70,283.98 147.23,322.01 175.29,354.00
|
||||
186.40,366.68 200.07,378.48 214.00,387.97
|
||||
214.00,387.97 233.00,399.69 233.00,399.69
|
||||
237.27,402.07 242.48,404.45 244.83,409.01
|
||||
245.92,411.15 246.92,422.15 246.99,425.00
|
||||
247.08,429.22 247.31,433.36 243.77,436.35
|
||||
240.37,439.23 230.76,440.63 226.00,442.00
|
||||
226.00,442.00 202.00,449.66 202.00,449.66
|
||||
202.00,449.66 187.00,454.42 187.00,454.42
|
||||
158.57,465.16 122.49,483.86 100.00,504.17
|
||||
82.80,519.70 68.90,538.95 57.43,559.00
|
||||
32.23,603.04 14.06,661.71 7.28,712.00
|
||||
4.04,736.04 2.04,760.77 2.00,785.00
|
||||
2.00,785.00 1.00,802.00 1.00,802.00
|
||||
1.00,802.00 1.00,856.00 1.00,856.00
|
||||
1.06,891.28 37.59,906.11 66.00,916.00
|
||||
66.00,916.00 83.00,921.86 83.00,921.86
|
||||
83.00,921.86 104.00,927.87 104.00,927.87
|
||||
152.12,939.98 187.83,945.13 237.00,949.83
|
||||
237.00,949.83 256.00,951.17 256.00,951.17
|
||||
256.00,951.17 295.00,954.09 295.00,954.09
|
||||
295.00,954.09 324.00,956.00 324.00,956.00
|
||||
324.00,956.00 343.00,957.00 343.00,957.00
|
||||
343.00,957.00 358.00,958.00 358.00,958.00
|
||||
358.00,958.00 372.00,958.00 372.00,958.00
|
||||
372.00,958.00 387.00,959.00 387.00,959.00
|
||||
387.00,959.00 407.00,959.00 407.00,959.00
|
||||
407.00,959.00 422.00,960.00 422.00,960.00
|
||||
422.00,960.00 458.00,960.00 458.00,960.00
|
||||
Z
|
||||
M 597.00,494.00
|
||||
C 597.00,494.00 597.00,538.00 597.00,538.00
|
||||
597.00,540.78 597.21,544.42 595.83,546.89
|
||||
593.17,551.65 579.34,555.10 574.00,557.05
|
||||
558.16,562.86 541.56,572.75 528.00,582.73
|
||||
523.78,585.84 510.63,597.85 507.00,598.67
|
||||
502.87,599.60 495.59,594.53 492.00,592.40
|
||||
482.64,586.85 468.27,577.41 458.00,575.00
|
||||
458.00,575.00 431.72,620.00 431.72,620.00
|
||||
431.72,620.00 411.00,658.00 411.00,658.00
|
||||
411.00,658.00 459.00,685.00 459.00,685.00
|
||||
455.62,697.75 451.98,717.92 452.00,731.00
|
||||
452.00,731.00 453.00,751.00 453.00,751.00
|
||||
453.13,762.00 455.58,778.56 459.00,789.00
|
||||
459.00,789.00 426.00,807.58 426.00,807.58
|
||||
422.31,809.67 414.78,813.03 413.21,817.04
|
||||
411.47,821.45 420.54,834.72 423.00,839.00
|
||||
423.00,839.00 459.00,901.00 459.00,901.00
|
||||
459.00,901.00 491.00,882.85 491.00,882.85
|
||||
493.72,881.29 501.87,876.24 504.58,876.16
|
||||
508.36,876.05 513.34,881.46 516.00,883.84
|
||||
522.42,889.55 530.71,896.03 538.00,900.58
|
||||
548.81,907.32 560.10,913.34 572.00,917.94
|
||||
576.42,919.65 591.11,924.37 593.77,926.65
|
||||
597.41,929.75 596.99,933.68 597.00,938.00
|
||||
597.00,938.00 597.00,980.00 597.00,980.00
|
||||
597.00,980.00 693.00,980.00 693.00,980.00
|
||||
693.00,980.00 693.00,926.00 693.00,926.00
|
||||
711.77,923.35 741.60,908.84 757.00,897.86
|
||||
765.57,891.74 768.35,889.04 776.00,882.17
|
||||
778.42,880.00 782.65,876.06 786.00,875.96
|
||||
789.74,875.85 797.64,881.49 801.00,883.58
|
||||
810.62,889.54 821.70,896.56 832.00,901.00
|
||||
832.00,901.00 854.85,861.00 854.85,861.00
|
||||
854.85,861.00 880.00,816.00 880.00,816.00
|
||||
880.00,816.00 848.00,797.42 848.00,797.42
|
||||
844.21,795.25 837.01,791.80 835.03,787.96
|
||||
833.22,784.46 835.27,777.88 835.92,774.00
|
||||
837.31,765.75 838.99,753.22 839.00,745.00
|
||||
839.03,722.99 837.69,706.48 832.00,685.00
|
||||
832.00,685.00 879.00,659.00 879.00,659.00
|
||||
879.00,659.00 858.30,620.00 858.30,620.00
|
||||
858.30,620.00 842.42,592.00 842.42,592.00
|
||||
842.42,592.00 832.00,574.00 832.00,574.00
|
||||
821.43,577.69 809.70,585.32 800.00,591.20
|
||||
796.39,593.39 789.09,598.68 785.00,598.56
|
||||
781.12,598.45 776.87,594.02 774.00,591.58
|
||||
766.22,584.96 759.75,579.50 751.00,574.06
|
||||
739.95,567.20 728.14,561.41 716.00,556.81
|
||||
711.43,555.07 699.37,551.65 696.43,548.59
|
||||
693.64,545.68 694.01,541.72 694.00,538.00
|
||||
694.00,538.00 694.00,494.00 694.00,494.00
|
||||
694.00,494.00 597.00,494.00 597.00,494.00
|
||||
Z
|
||||
M 634.00,619.29
|
||||
C 667.81,616.13 706.02,628.51 729.91,653.04
|
||||
770.54,694.74 774.57,766.57 737.54,812.00
|
||||
725.64,826.59 714.66,834.43 698.00,842.75
|
||||
677.41,853.04 660.74,855.26 638.00,855.00
|
||||
631.50,854.92 623.34,853.21 617.00,851.63
|
||||
583.30,843.20 554.03,820.90 539.31,789.00
|
||||
529.16,767.01 527.89,753.66 528.00,730.00
|
||||
528.17,693.08 552.71,654.07 584.00,635.45
|
||||
600.28,625.76 615.54,622.10 634.00,619.29
|
||||
Z
|
||||
"
|
||||
/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 8.9 KiB |
114
res/ftrack/action_icons/PypeDoctor.svg
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
<?xml version="1.0" ?>
|
||||
<svg id="Layer_1" style="enable-background:new 0 0 512 512;" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g>
|
||||
<g>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
|
||||
<stop offset="0" style="stop-color:#ffffff"/>
|
||||
<stop offset="1" style="stop-color:#aaaaaa"/>
|
||||
</linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
|
||||
<stop offset="0" style="stop-color:#aaaaaa"/>
|
||||
<stop offset="1" style="stop-color:#ffffff"/>
|
||||
</linearGradient>
|
||||
<path d="M256,469.3338623c-117.6314697,0-213.3333435-95.7023926-213.3333435-213.3333435 c0-117.6314545,95.7018661-213.333313,213.3333435-213.333313c117.6357422,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.6357422,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/>
|
||||
</g>
|
||||
<g>
|
||||
<path
|
||||
fill="#007dce"
|
||||
d="
|
||||
M386.6,277.5
|
||||
c-16.3-16.2-42.7-16.2-58.9,0
|
||||
c-2.3,2.3-4.2,4.8-5.9,7.4
|
||||
c0,0,0,0,0,0
|
||||
c-0.1,0.2-0.1,0.3-0.2,0.5
|
||||
|
||||
c-21.4-2.3-53.7,4.9-74.4,44.4
|
||||
c-13.7,26-35.8,44.3-57.9,47.7
|
||||
c-9.4,1.4-22.9,0.7-34.4-10.7
|
||||
c-14.8-14.7-21.6-37.1-16.7-54.4
|
||||
|
||||
c1.8-6.4,6.2-14.5,16.4-16.8
|
||||
c21.2-4.9,38.3-13,48.7-18.9
|
||||
c0.3,0.3,0.6,0.5,0.9,0.8
|
||||
c5.5,4,11.7,7.1,18.6,9.1
|
||||
|
||||
c6.3,1.9,12.8,0.5,17.6-3.2
|
||||
c37.1,1.9,77.8-20.8,80.1-22.1
|
||||
c4.8-2.8,7-8.4,5.4-13.5
|
||||
c0,0,0,0,0,0
|
||||
l-3-4.2
|
||||
l3,4.2
|
||||
|
||||
c26.4-21,29.7-41.6,29.2-51.8
|
||||
c0,0,0,0,0,0
|
||||
c2.7-1.9,4.2-5.2,3.5-8.5
|
||||
l-3.4-16.1
|
||||
c-0.9-4.2-4.9-6.6-9-5.2
|
||||
l-13.7,4.3
|
||||
|
||||
c-4.1,1.3-5.8,5.4-3.7,9.2
|
||||
l7.8,14.2
|
||||
c2,3.6,6.7,5.6,10.7,4.8
|
||||
c0,0.1,0,0.1,0,0.2
|
||||
c-0.3,9-4.3,25.5-26,42.8
|
||||
c0,0,0,0-0.1,0
|
||||
|
||||
c-3.6-2.4-8.3-2.8-12.4-0.5
|
||||
c-12.2,6.8-39.5,18.8-62.7,19.2
|
||||
c-2.2-5-6.4-9.1-12.1-10.8
|
||||
c-3.7-1.1-6.7-2.8-9.2-5.2
|
||||
l-0.5-0.5
|
||||
|
||||
c-2.5-2.6-4.3-5.8-5.4-9.7
|
||||
c-1.6-5.6-5.5-9.9-10.4-12.2
|
||||
c0.7-22.9,12.5-49.5,19.3-61.4
|
||||
c2.2-3.9,1.9-8.5-0.4-12
|
||||
l-0.7-0.5
|
||||
l0.7,0.5
|
||||
c-0.1-0.1-0.1-0.2-0.2-0.3
|
||||
c17.5-21.8,34.1-25.7,43.2-26
|
||||
v0
|
||||
c0,0,0,0,0,0
|
||||
c-0.9,4,1.1,8.7,4.8,10.7
|
||||
l14.3,7.8
|
||||
c3.8,2,7.9,0.4,9.3-3.7
|
||||
|
||||
l4.3-13.6
|
||||
c1.3-4.1-1-8.1-5.3-9
|
||||
l-16.1-3.4
|
||||
c-3.2-0.7-6.5,0.8-8.4,3.3
|
||||
c-0.1,0.1-0.1,0.1-0.2,0.2
|
||||
c-10.2-0.4-31,2.7-52.1,29.1
|
||||
l-0.1,0.1
|
||||
|
||||
c-5.1-1.6-10.8,0.6-13.6,5.4
|
||||
c-1.3,2.3-23.3,41.5-22.2,77.9
|
||||
c-4,4.8-5.7,11.3-3.8,17.7
|
||||
c1.3,4.4,2.9,8.5,4.9,12.3
|
||||
|
||||
c0.4,0.6,0.9,1.2,1.3,1.9
|
||||
c-9,4.6-22.6,10.4-38.4,14
|
||||
c-16.4,3.8-28.7,15.8-33.7,33.1
|
||||
c-7.3,25.5,1.8,56.6,22.6,77.3
|
||||
|
||||
c14.3,14.3,33.7,20.4,54.5,17.2
|
||||
c29.7-4.6,57.8-27,75.1-59.9
|
||||
c15.2-29,36-32.9,48.2-32.2
|
||||
c0.4,9,3.4,18,9.4,25.4
|
||||
|
||||
c0.7,0.8,1.4,1.6,2.2,2.4
|
||||
c16.3,16.2,42.7,16.2,58.9,0
|
||||
C402.9,319.9,402.9,293.7,386.6,277.5
|
||||
z
|
||||
|
||||
M368.7,318.2
|
||||
c-6.4,6.3-16.7,6.3-23,0
|
||||
|
||||
c-6.3-6.3-6.3-16.6,0-22.9
|
||||
c6.4-6.3,16.7-6.3,23,0
|
||||
C375,301.7,375,311.9,368.7,318.2
|
||||
z"
|
||||
/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.2 KiB |
|
|
@ -1 +0,0 @@
|
|||
<?xml version="1.0" ?><svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g><g><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="0.0000038" x2="512" y1="256" y2="256"><stop offset="0" style="stop-color:#00AEEE"/><stop offset="1" style="stop-color:#0095DA"/></linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666679" x2="469.3333435" y1="256.0005188" y2="256.0005188"><stop offset="0" style="stop-color:#0095DA"/><stop offset="1" style="stop-color:#00AEEE"/></linearGradient><path d="M256,469.3338623c-117.6314697,0-213.3333282-95.7023926-213.3333282-213.3333435 c0-117.6314545,95.7018585-213.333313,213.3333282-213.333313c117.636261,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.636261,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/></g><path d="M360.2250671,330.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C315.2849121,166.9605255,292.3237305,144,264,144 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842865,51.0314636v46.6655884h-91.2256165 v37.9413452h-21.5375977V384h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256165v27.9413452h-21.5376129V384h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H328.687561V384h53.0761719v-53.0773315H360.2250671z" opacity="0.3"/><path d="M352.2250671,322.9226685v-37.9413452h-91.2255859v-46.6655884 c25.976532-2.5151978,46.2854309-24.3957367,46.2854309-51.0314636C307.2849121,158.9605255,284.3237305,136,256,136 s-51.2848053,22.9605255-51.2848053,51.2842712c0,26.6357269,20.3088074,48.5162659,46.2842712,51.0314636v46.6655884h-91.2256012 v37.9413452h-21.5375977V376h53.0762634v-53.0773315h-21.5386658v-27.9413452h81.2256012v27.9413452h-21.5375977V376h53.0763702 v-53.0773315h-21.5387573v-27.9413452h81.2255859v27.9413452H320.687561V376h53.0761719v-53.0773315H352.2250671z" fill="#FFFFFF"/></g></svg>
Before Width: | Height: | Size: 2.1 KiB |
@ -1 +0,0 @@
<?xml version="1.0" ?><svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><g><g><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="0.0000038" x2="512" y1="256" y2="256"><stop offset="0" style="stop-color:#F35CB5"/><stop offset="1" style="stop-color:#EC008C"/></linearGradient><circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/><linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666679" x2="469.3333435" y1="256.0005188" y2="256.0005188"><stop offset="0" style="stop-color:#EC008C"/><stop offset="1" style="stop-color:#F35CB5"/></linearGradient><path d="M256,469.3338623c-117.6314697,0-213.3333282-95.7018738-213.3333282-213.3333435 c0-117.6309204,95.7018585-213.333313,213.3333282-213.333313c117.6357422,0,213.3333435,95.7024002,213.3333435,213.333313 C469.3333435,373.6319885,373.6357422,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/></g><g><polygon opacity="0.3" points="341.5125427,245.7781372 334.3989258,238.6645355 269.0289001,304.0325317 269.0289001,148.352005 258.9679871,148.352005 258.9679871,304.0325317 193.6026611,238.6645355 186.4895935,245.7781372 263.9984131,323.2896118 "/><polygon opacity="0.3" points="337.4869385,265.3551941 264.3935852,339.2314758 190.517334,265.3551941 144,265.3551941 144,379.6485291 384,379.6485291 384,265.3551941 "/></g><g><polygon fill="#FFFFFF" points="333.5125427,237.7781372 326.3989258,230.6645355 261.0289001,296.0325317 261.0289001,140.352005 250.9680023,140.352005 250.9680023,296.0325317 185.6026611,230.6645355 178.4896088,237.7781372 255.9983978,315.2896118 "/><polygon fill="#FFFFFF" points="329.4869385,257.3551941 256.3935852,331.2314758 182.517334,257.3551941 136,257.3551941 136,371.6485291 376,371.6485291 376,257.3551941 "/></g></g></svg>
Before Width: | Height: | Size: 1.9 KiB |
136  res/ftrack/action_icons/Thumbnail.svg  Normal file
@ -0,0 +1,136 @@
<?xml version="1.0" ?>
<svg enable-background="new 0 0 512 512" id="Layer_1" version="1.1" viewBox="0 0 512 512" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g>
<g>
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_1_" x1="-0.0000027" x2="512" y1="256" y2="256">
<stop offset="0" style="stop-color:#000"/>
<stop offset="1" style="stop-color:#456"/>
</linearGradient>
<circle cx="256" cy="256" fill="url(#SVGID_1_)" r="256"/>
<linearGradient gradientUnits="userSpaceOnUse" id="SVGID_2_" x1="42.6666641" x2="469.3333435" y1="256.0005188" y2="256.0005188">
<stop offset="0" style="stop-color:#456"/>
<stop offset="1" style="stop-color:#000"/>
</linearGradient>
<path d="M256,469.3338623c-117.6314697,0-213.3333435-95.7023926-213.3333435-213.3333435 c0-117.6314545,95.7018661-213.333313,213.3333435-213.333313c117.636261,0,213.3333435,95.7018661,213.3333435,213.333313 C469.3333435,373.6314697,373.636261,469.3338623,256,469.3338623z" fill="url(#SVGID_2_)"/>
</g>
<g id="cover-shadow"
transform="translate(124.00 124.00) scale(2)">
<g transform="translate(-8 -8) scale(1.15)">
<path id="Selection"
fill="#fff"
d="
M 0.00,0.00
C 0.00,0.00 0.00,24.00 0.00,24.00
0.00,24.00 8.00,24.00 8.00,24.00
8.00,24.00 8.00,8.00 8.00,8.00
8.00,8.00 24.00,8.00 24.00,8.00
24.00,8.00 24.00,0.00 24.00,0.00
24.00,0.00 0.00,0.00 0.00,0.00
Z
M 40.00,0.00
C 40.00,0.00 40.00,8.00 40.00,8.00
40.00,8.00 56.00,8.00 56.00,8.00
56.00,8.00 56.00,0.00 56.00,0.00
56.00,0.00 40.00,0.00 40.00,0.00
Z
M 72.00,0.00
C 72.00,0.00 72.00,8.00 72.00,8.00
72.00,8.00 88.00,8.00 88.00,8.00
88.00,8.00 88.00,0.00 88.00,0.00
88.00,0.00 72.00,0.00 72.00,0.00
Z
M 104.00,0.00
C 104.00,0.00 104.00,8.00 104.00,8.00
104.00,8.00 120.00,8.00 120.00,8.00
120.00,8.00 120.00,24.00 120.00,24.00
120.00,24.00 128.00,24.00 128.00,24.00
128.00,24.00 128.00,0.00 128.00,0.00
128.00,0.00 104.00,0.00 104.00,0.00
Z
M 0.00,40.00
C 0.00,40.00 0.00,56.00 0.00,56.00
0.00,56.00 8.00,56.00 8.00,56.00
8.00,56.00 8.00,40.00 8.00,40.00
8.00,40.00 0.00,40.00 0.00,40.00
Z
M 120.00,40.00
C 120.00,40.00 120.00,56.00 120.00,56.00
120.00,56.00 128.00,56.00 128.00,56.00
128.00,56.00 128.00,40.00 128.00,40.00
128.00,40.00 120.00,40.00 120.00,40.00
Z
M 0.00,72.00
C 0.00,72.00 0.00,88.00 0.00,88.00
0.00,88.00 8.00,88.00 8.00,88.00
8.00,88.00 8.00,72.00 8.00,72.00
8.00,72.00 0.00,72.00 0.00,72.00
Z
M 120.00,72.00
C 120.00,72.00 120.00,88.00 120.00,88.00
120.00,88.00 128.00,88.00 128.00,88.00
128.00,88.00 128.00,72.00 128.00,72.00
128.00,72.00 120.00,72.00 120.00,72.00
Z
M 0.00,104.00
C 0.00,104.00 0.00,128.00 0.00,128.00
0.00,128.00 16.00,128.00 16.00,128.00
16.00,128.00 16.00,120.00 16.00,120.00
16.00,120.00 8.00,120.00 8.00,120.00
8.00,120.00 8.00,104.00 8.00,104.00
8.00,104.00 0.00,104.00 0.00,104.00
Z
M 120.00,104.00
C 120.00,104.00 120.00,120.00 120.00,120.00
120.00,120.00 104.00,120.00 104.00,120.00
104.00,120.00 104.00,128.00 104.00,128.00
104.00,128.00 128.00,128.00 128.00,128.00
128.00,128.00 128.00,104.00 128.00,104.00
128.00,104.00 120.00,104.00 120.00,104.00
Z
M 40.00,120.00
C 40.00,120.00 40.00,128.00 40.00,128.00
40.00,128.00 56.00,128.00 56.00,128.00
56.00,128.00 56.00,120.00 56.00,120.00
56.00,120.00 40.00,120.00 40.00,120.00
Z
M 72.00,120.00
C 72.00,120.00 72.00,128.00 72.00,128.00
72.00,128.00 88.00,128.00 88.00,128.00
88.00,128.00 88.00,120.00 88.00,120.00
88.00,120.00 72.00,120.00 72.00,120.00
Z"
/>
</g>
<g transform="translate(-21.00 24.00)">
<path id="Selection"
fill="#fff"
d="
M 48.00,24.00
C 48.00,24.00 48.00,64.00 48.00,64.00
48.00,64.00 24.00,64.00 24.00,64.00
24.00,64.00 64.00,102.80 64.00,102.80
64.00,102.80 104.00,64.00 104.00,64.00
104.00,64.00 80.00,64.00 80.00,64.00
80.00,64.00 80.00,24.00 80.00,24.00
80.00,24.00 48.00,24.00 48.00,24.00
Z"
/>
</g>
<g transform="translate(24.00 -21.00) scale(0.25)">
<path id="Selection"
fill="#fff"
d="
M 96.00,256.00
C 96.00,256.00 192.00,256.00 192.00,256.00
192.00,256.00 192.00,416.00 192.00,416.00
192.00,416.00 320.00,416.00 320.00,416.00
320.00,416.00 320.00,256.00 320.00,256.00
320.00,256.00 416.00,256.00 416.00,256.00
416.00,256.00 256.00,97.20 256.00,97.20
256.00,97.20 96.00,256.00 96.00,256.00
Z"
/>
</g>
</g>
</g>
</svg>
After Width: | Height: | Size: 6.8 KiB |