Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit b9521d73eb: Merge remote-tracking branch 'origin/develop' into develop
201 changed files with 4667 additions and 852 deletions
@@ -1,6 +1,7 @@
from .settings import (
system_settings,
project_settings
project_settings,
environments
)
from pypeapp import (
Logger,
@@ -55,6 +56,7 @@ from .lib import _subprocess as subprocess
__all__ = [
"system_settings",
"project_settings",
"environments",

"Logger",
"Anatomy",
@@ -0,0 +1,6 @@
# -*- coding: utf-8 -*-
from .base import PypeModule

__all__ = (
"PypeModule",
)
@@ -1,16 +1,27 @@
from Qt import QtWidgets
from avalon.tools import libraryloader
from pype.api import Logger
from pype.tools.launcher import LauncherWindow, actions


class AvalonApps:
def __init__(self, main_parent=None, parent=None):
self.log = Logger().get_logger(__name__)
self.main_parent = main_parent

self.tray_init(main_parent, parent)

def tray_init(self, main_parent, parent):
from avalon.tools.libraryloader import app
from avalon import style
from pype.tools.launcher import LauncherWindow, actions

self.parent = parent
self.main_parent = main_parent

self.app_launcher = LauncherWindow()
self.libraryloader = app.Window(
icon=self.parent.icon,
show_projects=True,
show_libraries=True
)
self.libraryloader.setStyleSheet(style.load_stylesheet())

# actions.register_default_actions()
actions.register_config_actions()
@@ -23,6 +34,7 @@ class AvalonApps:

# Definition of Tray menu
def tray_menu(self, parent_menu=None):
from Qt import QtWidgets
# Actions
if parent_menu is None:
if self.parent is None:
@@ -52,9 +64,11 @@ class AvalonApps:
self.app_launcher.activateWindow()

def show_library_loader(self):
libraryloader.show(
parent=self.main_parent,
icon=self.parent.icon,
show_projects=True,
show_libraries=True
)
self.libraryloader.show()

# Raise and activate the window
# for MacOS
self.libraryloader.raise_()
# for Windows
self.libraryloader.activateWindow()
self.libraryloader.refresh()
pype/modules/base.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
"""Base class for Pype Modules."""
from uuid import uuid4
from abc import ABC, abstractmethod
from pype.api import Logger


class PypeModule(ABC):
"""Base class of pype module.

Attributes:
id (UUID): Module id.
enabled (bool): Is module enabled.
name (str): Module name.
"""

enabled = False
name = None
_id = None

def __init__(self, settings):
if self.name is None:
self.name = self.__class__.__name__

self.log = Logger().get_logger(self.name)

self.settings = settings.get(self.name)
self.enabled = settings.get("enabled", False)
self._id = uuid4()

@property
def id(self):
return self._id

@abstractmethod
def startup_environments(self):
"""Get startup environments for module."""
return {}
@@ -1,9 +1,8 @@
import os
import threading
import time

from pype.api import Logger
from avalon import style
from Qt import QtWidgets
from .widgets import ClockifySettings, MessageWidget
from .clockify_api import ClockifyAPI
from .constants import CLOCKIFY_FTRACK_USER_PATH

@@ -17,11 +16,21 @@ class ClockifyModule:

os.environ["CLOCKIFY_WORKSPACE"] = self.workspace_name

self.timer_manager = None
self.MessageWidgetClass = None

self.clockapi = ClockifyAPI(master_parent=self)

self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
self.tray_init(main_parent, parent)

def tray_init(self, main_parent, parent):
from .widgets import ClockifySettings, MessageWidget

self.MessageWidgetClass = MessageWidget

self.main_parent = main_parent
self.parent = parent
self.clockapi = ClockifyAPI(master_parent=self)
self.message_widget = None
self.widget_settings = ClockifySettings(main_parent, self)
self.widget_settings_required = None
@@ -57,11 +66,10 @@ class ClockifyModule:
)

if 'AvalonApps' in modules:
from launcher import lib
actions_path = os.path.sep.join([
actions_path = os.path.join(
os.path.dirname(__file__),
'launcher_actions'
])
)
current = os.environ.get('AVALON_ACTIONS', '')
if current:
current += os.pathsep
@@ -78,12 +86,12 @@ class ClockifyModule:
self.stop_timer()

def timer_started(self, data):
if hasattr(self, 'timer_manager'):
if self.timer_manager:
self.timer_manager.start_timers(data)

def timer_stopped(self):
self.bool_timer_run = False
if hasattr(self, 'timer_manager'):
if self.timer_manager:
self.timer_manager.stop_timers()

def start_timer_check(self):
@@ -102,7 +110,7 @@ class ClockifyModule:
self.thread_timer_check = None

def check_running(self):
import time

while self.bool_thread_check_running is True:
bool_timer_run = False
if self.clockapi.get_in_progress() is not None:
@@ -156,15 +164,14 @@ class ClockifyModule:
self.timer_stopped()

def signed_in(self):
if hasattr(self, 'timer_manager'):
if not self.timer_manager:
return
if not self.timer_manager:
return

if not self.timer_manager.last_task:
return
if not self.timer_manager.last_task:
return

if self.timer_manager.is_running:
self.start_timer_manager(self.timer_manager.last_task)
if self.timer_manager.is_running:
self.start_timer_manager(self.timer_manager.last_task)

def start_timer(self, input_data):
# If not api key is not entered then skip
@@ -197,11 +204,14 @@ class ClockifyModule:
"<br><br>Please inform your Project Manager."
).format(project_name, str(self.clockapi.workspace_name))

self.message_widget = MessageWidget(
self.main_parent, msg, "Clockify - Info Message"
)
self.message_widget.closed.connect(self.on_message_widget_close)
self.message_widget.show()
if self.MessageWidgetClass:
self.message_widget = self.MessageWidgetClass(
self.main_parent, msg, "Clockify - Info Message"
)
self.message_widget.closed.connect(
self.on_message_widget_close
)
self.message_widget.show()

return

@@ -227,31 +237,29 @@ class ClockifyModule:
# Definition of Tray menu
def tray_menu(self, parent_menu):
# Menu for Tray App
self.menu = QtWidgets.QMenu('Clockify', parent_menu)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())
from Qt import QtWidgets
menu = QtWidgets.QMenu("Clockify", parent_menu)
menu.setProperty("submenu", "on")

# Actions
self.aShowSettings = QtWidgets.QAction(
"Settings", self.menu
)
self.aStopTimer = QtWidgets.QAction(
"Stop timer", self.menu
)
action_show_settings = QtWidgets.QAction("Settings", menu)
action_stop_timer = QtWidgets.QAction("Stop timer", menu)

self.menu.addAction(self.aShowSettings)
self.menu.addAction(self.aStopTimer)
menu.addAction(action_show_settings)
menu.addAction(action_stop_timer)

self.aShowSettings.triggered.connect(self.show_settings)
self.aStopTimer.triggered.connect(self.stop_timer)
action_show_settings.triggered.connect(self.show_settings)
action_stop_timer.triggered.connect(self.stop_timer)

self.action_stop_timer = action_stop_timer

self.set_menu_visibility()

parent_menu.addMenu(self.menu)
parent_menu.addMenu(menu)

def show_settings(self):
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
self.widget_settings.show()

def set_menu_visibility(self):
self.aStopTimer.setVisible(self.bool_timer_run)
self.action_stop_timer.setVisible(self.bool_timer_run)
@@ -1,2 +1,12 @@
from .lib import *
from . import ftrack_server
from .ftrack_server import FtrackServer, check_ftrack_url
from .lib import BaseHandler, BaseEvent, BaseAction

__all__ = (
"ftrack_server",
"FtrackServer",
"check_ftrack_url",
"BaseHandler",
"BaseEvent",
"BaseAction"
)
@@ -1,2 +1,8 @@
from .ftrack_server import FtrackServer
from .lib import check_ftrack_url


__all__ = (
"FtrackServer",
"check_ftrack_url"
)
@@ -16,9 +16,9 @@ import pymongo
from pype.api import decompose_url


class NotActiveTable(Exception):
class NotActiveCollection(Exception):
def __init__(self, *args, **kwargs):
msg = "Active table is not set. (This is bug)"
msg = "Active collection is not set. (This is bug)"
if not (args or kwargs):
args = [msg]
super().__init__(*args, **kwargs)
@@ -40,12 +40,12 @@ def auto_reconnect(func):
return decorated


def check_active_table(func):
def check_active_collection(func):
"""Check if CustomDbConnector has active collection."""
@functools.wraps(func)
def decorated(obj, *args, **kwargs):
if not obj.active_table:
raise NotActiveTable()
if not obj.active_collection:
raise NotActiveCollection()
return func(obj, *args, **kwargs)
return decorated

@@ -55,7 +55,7 @@ class CustomDbConnector:
timeout = int(os.environ["AVALON_TIMEOUT"])

def __init__(
self, uri, database_name, port=None, table_name=None
self, uri, database_name, port=None, collection_name=None
):
self._mongo_client = None
self._sentry_client = None
@@ -76,10 +76,10 @@ class CustomDbConnector:
self._port = port
self._database_name = database_name

self.active_table = table_name
self.active_collection = collection_name

def __getitem__(self, key):
# gives direct access to collection withou setting `active_table`
# gives direct access to collection withou setting `active_collection`
return self._database[key]

def __getattribute__(self, attr):
@@ -88,9 +88,11 @@ class CustomDbConnector:
try:
return super(CustomDbConnector, self).__getattribute__(attr)
except AttributeError:
if self.active_table is None:
raise NotActiveTable()
return self._database[self.active_table].__getattribute__(attr)
if self.active_collection is None:
raise NotActiveCollection()
return self._database[self.active_collection].__getattribute__(
attr
)

def install(self):
"""Establish a persistent connection to the database"""
@@ -146,46 +148,30 @@ class CustomDbConnector:
self._is_installed = False
atexit.unregister(self.uninstall)

def create_table(self, name, **options):
if self.exist_table(name):
def collection_exists(self, collection_name):
return collection_name in self.collections()

def create_collection(self, name, **options):
if self.collection_exists(name):
return

return self._database.create_collection(name, **options)

def exist_table(self, table_name):
return table_name in self.tables()

def create_table(self, name, **options):
if self.exist_table(name):
return

return self._database.create_collection(name, **options)

def exist_table(self, table_name):
return table_name in self.tables()

def tables(self):
"""List available tables
Returns:
list of table names
"""
collection_names = self.collections()
for table_name in collection_names:
if table_name in ("system.indexes",):
continue
yield table_name

@auto_reconnect
def collections(self):
return self._database.collection_names()
for col_name in self._database.collection_names():
if col_name not in ("system.indexes",):
yield col_name

@check_active_table
@check_active_collection
@auto_reconnect
def insert_one(self, item, **options):
assert isinstance(item, dict), "item must be of type <dict>"
return self._database[self.active_table].insert_one(item, **options)
return self._database[self.active_collection].insert_one(
item, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def insert_many(self, items, ordered=True, **options):
# check if all items are valid
@@ -194,72 +180,74 @@ class CustomDbConnector:
assert isinstance(item, dict), "`item` must be of type <dict>"

options["ordered"] = ordered
return self._database[self.active_table].insert_many(items, **options)
return self._database[self.active_collection].insert_many(
items, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def find(self, filter, projection=None, sort=None, **options):
options["sort"] = sort
return self._database[self.active_table].find(
return self._database[self.active_collection].find(
filter, projection, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def find_one(self, filter, projection=None, sort=None, **options):
assert isinstance(filter, dict), "filter must be <dict>"
options["sort"] = sort
return self._database[self.active_table].find_one(
return self._database[self.active_collection].find_one(
filter,
projection,
**options
)

@check_active_table
@check_active_collection
@auto_reconnect
def replace_one(self, filter, replacement, **options):
return self._database[self.active_table].replace_one(
return self._database[self.active_collection].replace_one(
filter, replacement, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def update_one(self, filter, update, **options):
return self._database[self.active_table].update_one(
return self._database[self.active_collection].update_one(
filter, update, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def update_many(self, filter, update, **options):
return self._database[self.active_table].update_many(
return self._database[self.active_collection].update_many(
filter, update, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def distinct(self, **options):
return self._database[self.active_table].distinct(**options)
return self._database[self.active_collection].distinct(**options)

@check_active_table
@check_active_collection
@auto_reconnect
def drop_collection(self, name_or_collection, **options):
return self._database[self.active_table].drop(
return self._database[self.active_collection].drop(
name_or_collection, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def delete_one(self, filter, collation=None, **options):
options["collation"] = collation
return self._database[self.active_table].delete_one(
return self._database[self.active_collection].delete_one(
filter, **options
)

@check_active_table
@check_active_collection
@auto_reconnect
def delete_many(self, filter, collation=None, **options):
options["collation"] = collation
return self._database[self.active_table].delete_many(
return self._database[self.active_collection].delete_many(
filter, **options
)
@@ -26,7 +26,7 @@ from pype.api import (
compose_url
)

from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
from .custom_db_connector import CustomDbConnector


TOPIC_STATUS_SERVER = "pype.event.server.status"
@@ -153,9 +153,9 @@ class StorerEventHub(SocketBaseEventHub):
class ProcessEventHub(SocketBaseEventHub):

hearbeat_msg = b"processor"
uri, port, database, table_name = get_ftrack_event_mongo_info()
uri, port, database, collection_name = get_ftrack_event_mongo_info()

is_table_created = False
is_collection_created = False
pypelog = Logger().get_logger("Session Processor")

def __init__(self, *args, **kwargs):
@@ -163,7 +163,7 @@ class ProcessEventHub(SocketBaseEventHub):
self.uri,
self.database,
self.port,
self.table_name
self.collection_name
)
super(ProcessEventHub, self).__init__(*args, **kwargs)

@@ -184,7 +184,7 @@ class ProcessEventHub(SocketBaseEventHub):
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.table_name))
).format(self.database, self.collection_name))
self.sock.sendall(b"MongoError")
sys.exit(0)
@@ -12,7 +12,9 @@ from pype.modules.ftrack.ftrack_server.lib import (
get_ftrack_event_mongo_info,
TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
)
from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
from pype.modules.ftrack.ftrack_server.custom_db_connector import (
CustomDbConnector
)
from pype.api import Logger

log = Logger().get_logger("Event storer")
@@ -23,8 +25,8 @@ class SessionFactory:
session = None


uri, port, database, table_name = get_ftrack_event_mongo_info()
dbcon = CustomDbConnector(uri, database, port, table_name)
uri, port, database, collection_name = get_ftrack_event_mongo_info()
dbcon = CustomDbConnector(uri, database, port, collection_name)

# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []
@@ -200,7 +202,7 @@ def main(args):
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(database, table_name))
).format(database, collection_name))
sock.sendall(b"MongoError")

finally:
@@ -1022,7 +1022,7 @@ class SyncEntitiesFactory:
continue

ent_path_items = [ent["name"] for ent in entity["link"]]
parents = ent_path_items[1:len(ent_path_items)-1:]
parents = ent_path_items[1:len(ent_path_items) - 1:]
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
@@ -1141,7 +1141,7 @@ class SyncEntitiesFactory:
if not is_right and not else_match_better:
entity = entity_dict["entity"]
ent_path_items = [ent["name"] for ent in entity["link"]]
parents = ent_path_items[1:len(ent_path_items)-1:]
parents = ent_path_items[1:len(ent_path_items) - 1:]
av_parents = av_ent_by_mongo_id["data"]["parents"]
if av_parents == parents:
is_right = True
@@ -2,7 +2,7 @@ import functools
import time
from pype.api import Logger
import ftrack_api
from pype.modules.ftrack.ftrack_server.lib import SocketSession
from pype.modules.ftrack import ftrack_server


class MissingPermision(Exception):
@@ -41,7 +41,7 @@ class BaseHandler(object):
self.log = Logger().get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, SocketSession)
isinstance(session, ftrack_server.lib.SocketSession)
):
raise Exception((
"Session object entered with args is instance of \"{}\""
@@ -49,7 +49,7 @@ class BaseHandler(object):
).format(
str(type(session)),
str(ftrack_api.session.Session),
str(SocketSession)
str(ftrack_server.lib.SocketSession)
))

self._session = session
@@ -1,7 +1,7 @@
import os
import requests
from avalon import style
from pype.modules.ftrack import credentials
from pype.modules.ftrack.lib import credentials
from . import login_tools
from pype.api import resources
from Qt import QtCore, QtGui, QtWidgets
@@ -238,6 +238,8 @@ class CredentialsDialog(QtWidgets.QDialog):

# If there is an existing server thread running we need to stop it.
if self._login_server_thread:
if self._login_server_thread.isAlive():
self._login_server_thread.stop()
self._login_server_thread.join()
self._login_server_thread = None
@@ -61,12 +61,17 @@ class LoginServerThread(threading.Thread):
def __init__(self, url, callback):
self.url = url
self.callback = callback
self._server = None
super(LoginServerThread, self).__init__()

def _handle_login(self, api_user, api_key):
'''Login to server with *api_user* and *api_key*.'''
self.callback(api_user, api_key)

def stop(self):
if self._server:
self._server.server_close()

def run(self):
'''Listen for events.'''
self._server = HTTPServer(
@@ -1,6 +1,4 @@
from Qt import QtWidgets
from pype.api import Logger
from ..gui.app import LogsWindow


class LoggingModule:
@@ -8,7 +6,13 @@ class LoggingModule:
self.parent = parent
self.log = Logger().get_logger(self.__class__.__name__, "logging")

self.window = None

self.tray_init(main_parent, parent)

def tray_init(self, main_parent, parent):
try:
from .gui.app import LogsWindow
self.window = LogsWindow()
self.tray_menu = self._tray_menu
except Exception:
@@ -18,12 +22,12 @@ class LoggingModule:

# Definition of Tray menu
def _tray_menu(self, parent_menu):
from Qt import QtWidgets
# Menu for Tray App
menu = QtWidgets.QMenu('Logging', parent_menu)
# menu.setProperty('submenu', 'on')

show_action = QtWidgets.QAction("Show Logs", menu)
show_action.triggered.connect(self.on_show_logs)
show_action.triggered.connect(self._show_logs_gui)
menu.addAction(show_action)

parent_menu.addMenu(menu)
@@ -34,5 +38,6 @@ class LoggingModule:
def process_modules(self, modules):
return

def on_show_logs(self):
self.window.show()
def _show_logs_gui(self):
if self.window:
self.window.show()
@@ -1,10 +1,7 @@
import appdirs
from avalon import style
from Qt import QtWidgets
import os
import json
from .widget_login import MusterLogin
from avalon.vendor import requests
import appdirs
import requests


class MusterModule:
@@ -21,6 +18,11 @@ class MusterModule:
self.cred_path = os.path.join(
self.cred_folder_path, self.cred_filename
)
self.tray_init(main_parent, parent)

def tray_init(self, main_parent, parent):
from .widget_login import MusterLogin

self.main_parent = main_parent
self.parent = parent
self.widget_login = MusterLogin(main_parent, self)
@@ -38,10 +40,6 @@ class MusterModule:
pass

def process_modules(self, modules):

def api_callback():
self.aShowLogin.trigger()

if "RestApiServer" in modules:
def api_show_login():
self.aShowLogin.trigger()
@@ -51,13 +49,12 @@ class MusterModule:

# Definition of Tray menu
def tray_menu(self, parent):
"""
Add **change credentials** option to tray menu.
"""
"""Add **change credentials** option to tray menu."""
from Qt import QtWidgets

# Menu for Tray App
self.menu = QtWidgets.QMenu('Muster', parent)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())

# Actions
self.aShowLogin = QtWidgets.QAction(
@@ -91,9 +88,9 @@ class MusterModule:
if not MUSTER_REST_URL:
raise AttributeError("Muster REST API url not set")
params = {
'username': username,
'password': password
}
'username': username,
'password': password
}
api_entry = '/api/login'
response = self._requests_post(
MUSTER_REST_URL + api_entry, params=params)
@@ -1,6 +1,6 @@
import os
import socket
from Qt import QtCore
import threading

from socketserver import ThreadingMixIn
from http.server import HTTPServer
@@ -155,14 +155,15 @@ class RestApiServer:
def is_running(self):
return self.rest_api_thread.is_running

def tray_exit(self):
self.stop()

def stop(self):
self.rest_api_thread.is_running = False

def thread_stopped(self):
self._is_running = False
self.rest_api_thread.stop()
self.rest_api_thread.join()


class RestApiThread(QtCore.QThread):
class RestApiThread(threading.Thread):
""" Listener for REST requests.

It is possible to register callbacks for url paths.
@@ -174,6 +175,12 @@ class RestApiThread(QtCore.QThread):
self.is_running = False
self.module = module
self.port = port
self.httpd = None

def stop(self):
self.is_running = False
if self.httpd:
self.httpd.server_close()

def run(self):
self.is_running = True
@@ -185,12 +192,14 @@ class RestApiThread(QtCore.QThread):
)

with ThreadingSimpleServer(("", self.port), Handler) as httpd:
self.httpd = httpd
while self.is_running:
httpd.handle_request()

except Exception:
log.warning(
"Rest Api Server service has failed", exc_info=True
)

self.httpd = None
self.is_running = False
self.module.thread_stopped()
@@ -2,7 +2,6 @@ import os
import sys
import subprocess
import pype
from pype import tools


class StandAlonePublishModule:
@@ -30,6 +29,7 @@ class StandAlonePublishModule:
))

def show(self):
from pype import tools
standalone_publisher_tool_path = os.path.join(
os.path.dirname(tools.__file__),
"standalonepublish"
@@ -1,5 +1,4 @@
from .timers_manager import TimersManager
from .widget_user_idle import WidgetUserIdle

CLASS_DEFINIION = TimersManager
@@ -1,21 +1,7 @@
from .widget_user_idle import WidgetUserIdle, SignalHandler
from pype.api import Logger, config
from pype.api import Logger


class Singleton(type):
""" Signleton implementation
"""
_instances = {}

def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(
Singleton, cls
).__call__(*args, **kwargs)
return cls._instances[cls]


class TimersManager(metaclass=Singleton):
class TimersManager:
""" Handles about Timers.

Should be able to start/stop all timers at once.
@@ -41,7 +27,13 @@ class TimersManager(metaclass=Singleton):

self.idle_man = None
self.signal_handler = None

self.trat_init(tray_widget, main_widget)

def trat_init(self, tray_widget, main_widget):
from .widget_user_idle import WidgetUserIdle, SignalHandler
self.widget_user_idle = WidgetUserIdle(self, tray_widget)
self.signal_handler = SignalHandler(self)

def set_signal_times(self):
try:
@@ -119,7 +111,6 @@ class TimersManager(metaclass=Singleton):
"""

if 'IdleManager' in modules:
self.signal_handler = SignalHandler(self)
if self.set_signal_times() is True:
self.register_to_idle_manager(modules['IdleManager'])
@@ -3,8 +3,6 @@ import json
import getpass

import appdirs
from Qt import QtWidgets
from .widget_user import UserWidget

from pype.api import Logger

@@ -24,6 +22,12 @@ class UserModule:
self.cred_path = os.path.normpath(os.path.join(
self.cred_folder_path, self.cred_filename
))
self.widget_login = None

self.tray_init(main_parent, parent)

def tray_init(self, main_parent=None, parent=None):
from .widget_user import UserWidget
self.widget_login = UserWidget(self)

self.load_credentials()
@@ -66,6 +70,7 @@ class UserModule:

# Definition of Tray menu
def tray_menu(self, parent_menu):
from Qt import QtWidgets
"""Add menu or action to Tray(or parent)'s menu"""
action = QtWidgets.QAction("Username", parent_menu)
action.triggered.connect(self.show_widget)
@@ -121,7 +126,8 @@ class UserModule:

self.cred = {"username": username}
os.environ[self.env_name] = username
self.widget_login.set_user(username)
if self.widget_login:
self.widget_login.set_user(username)
try:
file = open(self.cred_path, "w")
file.write(json.dumps(self.cred))
@@ -31,12 +31,13 @@ class WebSocketServer():
self.client = None
self.handlers = {}

port = None
websocket_url = os.getenv("WEBSOCKET_URL")
if websocket_url:
parsed = urllib.parse.urlparse(websocket_url)
port = parsed.port
if not port:
port = 8099  # fallback
port = 8098  # fallback

self.app = web.Application()

pype/modules_manager.py (new file, 102 lines)
@@ -0,0 +1,102 @@
import os
import inspect

import pype.modules
from pype.modules import PypeModule
from pype.settings import system_settings
from pype.api import Logger


class PypeModuleManager:
skip_module_names = ("__pycache__", )

def __init__(self):
self.log = Logger().get_logger(
"{}.{}".format(__name__, self.__class__.__name__)
)

self.pype_modules = self.find_pype_modules()

def modules_environments(self):
environments = {}
for pype_module in self.pype_modules.values():
environments.update(pype_module.startup_environments())
return environments

def find_pype_modules(self):
settings = system_settings()
modules = []
dirpath = os.path.dirname(pype.modules.__file__)
for module_name in os.listdir(dirpath):
# Check if path lead to a folder
full_path = os.path.join(dirpath, module_name)
if not os.path.isdir(full_path):
continue

# Skip known invalid names
if module_name in self.skip_module_names:
continue

import_name = "pype.modules.{}".format(module_name)
try:
modules.append(
__import__(import_name, fromlist=[""])
)

except Exception:
self.log.warning(
"Couldn't import {}".format(import_name), exc_info=True
)

pype_module_classes = []
for module in modules:
try:
pype_module_classes.extend(
self._classes_from_module(PypeModule, module)
)
except Exception:
self.log.warning(
"Couldn't import {}".format(import_name), exc_info=True
)

pype_modules = {}
for pype_module_class in pype_module_classes:
try:
pype_module = pype_module_class(settings)
if pype_module.enabled:
pype_modules[pype_module.id] = pype_module
except Exception:
self.log.warning(
"Couldn't create instance of {}".format(
pype_module_class.__class__.__name__
),
exc_info=True
)
return pype_modules

def _classes_from_module(self, superclass, module):
classes = list()

def recursive_bases(klass):
output = []
output.extend(klass.__bases__)
for base in klass.__bases__:
output.extend(recursive_bases(base))
return output

for name in dir(module):
# It could be anything at this point
obj = getattr(module, name)

if not inspect.isclass(obj) or not len(obj.__bases__) > 0:
continue

# Use string comparison rather than `issubclass`
# in order to support reloading of this module.
bases = recursive_bases(obj)
if not any(base.__name__ == superclass.__name__ for base in bases):
continue

classes.append(obj)

return classes
@@ -97,6 +97,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

def process(self, instance):
@@ -178,6 +179,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

# Adding metadata
@@ -228,6 +230,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

# Adding metadata
@@ -242,6 +245,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session.commit()
except Exception:
session.rollback()
session._configure_locations()
self.log.warning((
"Comment was not possible to set for AssetVersion"
"\"{0}\". Can't set it's value to: \"{1}\""
@@ -258,6 +262,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
continue
except Exception:
session.rollback()
session._configure_locations()

self.log.warning((
"Custom Attrubute \"{0}\"
@@ -272,6 +277,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

# Component
@@ -316,6 +322,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

# Reset members in memory
@@ -432,6 +439,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)

if assetversion_entity not in used_asset_versions:
@@ -145,4 +145,5 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
@@ -130,6 +130,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

# TASKS
@@ -158,6 +159,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

# Incoming links.
@@ -167,6 +169,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

# Create notes.
@@ -187,6 +190,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

# Import children.
@@ -203,6 +207,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

# Create new links.
@@ -244,6 +249,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

return task
@@ -258,6 +264,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)

return entity
@@ -272,7 +279,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
self.session._configure_locations()
six.reraise(tp, value, tb)

def auto_sync_on(self, project):

@@ -285,4 +293,5 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
self.session._configure_locations()
six.reraise(tp, value, tb)
@@ -20,8 +20,8 @@ class CopyFile(api.Loader):
def copy_file_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets

app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
clipboard = QtWidgets.QApplication.clipboard()
assert clipboard, "Must have running QApplication instance"

# Build mime data for clipboard
data = QtCore.QMimeData()
@@ -29,5 +29,4 @@ class CopyFile(api.Loader):
data.setUrls([url])

# Set to Clipboard
clipboard = app.clipboard()
clipboard.setMimeData(data)
@@ -19,11 +19,10 @@ class CopyFilePath(api.Loader):

@staticmethod
def copy_path_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets
from avalon.vendor.Qt import QtWidgets

app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
clipboard = QtWidgets.QApplication.clipboard()
assert clipboard, "Must have running QApplication instance"

# Set to Clipboard
clipboard = app.clipboard()
clipboard.setText(os.path.normpath(path))
@@ -23,123 +23,256 @@ Provides:

import copy
import json
import collections

from avalon import io
import pyblish.api


class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
"""Collect Instance specific Anatomy data."""
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
"""Collect Instance specific Anatomy data.

Plugin is running for all instances on context even not active instances.
"""

order = pyblish.api.CollectorOrder + 0.49
label = "Collect Anatomy Instance data"

def process(self, instance):
# get all the stuff from the database
anatomy_data = copy.deepcopy(instance.context.data["anatomyData"])
project_entity = instance.context.data["projectEntity"]
context_asset_entity = instance.context.data["assetEntity"]
instance_asset_entity = instance.data.get("assetEntity")
def process(self, context):
self.log.info("Collecting anatomy data for all instances.")

asset_name = instance.data["asset"]
self.fill_missing_asset_docs(context)
self.fill_latest_versions(context)
self.fill_anatomy_data(context)

# There is possibility that assetEntity on instance is already set
# which can happen in standalone publisher
if (
instance_asset_entity
and instance_asset_entity["name"] == asset_name
):
asset_entity = instance_asset_entity
self.log.info("Anatomy Data collection finished.")

# Check if asset name is the same as what is in context
# - they may be different, e.g. in NukeStudio
elif context_asset_entity["name"] == asset_name:
asset_entity = context_asset_entity
def fill_missing_asset_docs(self, context):
self.log.debug("Qeurying asset documents for instances.")

else:
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
context_asset_doc = context.data["assetEntity"]

subset_name = instance.data["subset"]
version_number = instance.data.get("version")
latest_version = None
instances_with_missing_asset_doc = collections.defaultdict(list)
for instance in context:
instance_asset_doc = instance.data.get("assetEntity")
_asset_name = instance.data["asset"]

if asset_entity:
subset_entity = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_entity["_id"]
})
# There is possibility that assetEntity on instance is already set
# which can happen in standalone publisher
if (
instance_asset_doc
and instance_asset_doc["name"] == _asset_name
):
continue

# Check if asset name is the same as what is in context
# - they may be different, e.g. in NukeStudio
if context_asset_doc["name"] == _asset_name:
instance.data["assetEntity"] = context_asset_doc

if subset_entity is None:
self.log.debug("Subset entity does not exist yet.")
else:
version_entity = io.find_one(
{
"type": "version",
"parent": subset_entity["_id"]
},
sort=[("name", -1)]
)
if version_entity:
latest_version = version_entity["name"]
instances_with_missing_asset_doc[_asset_name].append(instance)

# If version is not specified for instance or context
if version_number is None:
# TODO we should be able to change default version by studio
# preferences (like start with version number `0`)
version_number = 1
# use latest version (+1) if already any exist
if latest_version is not None:
version_number += int(latest_version)
if not instances_with_missing_asset_doc:
self.log.debug("All instances already had right asset document.")
return

anatomy_updates = {
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
asset_names = list(instances_with_missing_asset_doc.keys())
self.log.debug("Querying asset documents with names: {}".format(
", ".join(["\"{}\"".format(name) for name in asset_names])
))
asset_docs = io.find({
"type": "asset",
"name": {"$in": asset_names}
})
asset_docs_by_name = {
asset_doc["name"]: asset_doc
for asset_doc in asset_docs
}
if (
asset_entity
and asset_entity["_id"] != context_asset_entity["_id"]
):
parents = asset_entity["data"].get("parents") or list()
anatomy_updates["hierarchy"] = "/".join(parents)

task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name
not_found_asset_names = []
for asset_name, instances in instances_with_missing_asset_doc.items():
asset_doc = asset_docs_by_name.get(asset_name)
if not asset_doc:
not_found_asset_names.append(asset_name)
continue

# Version should not be collected since may be instance
anatomy_data.update(anatomy_updates)
for _instance in instances:
_instance.data["assetEntity"] = asset_doc

resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_data["resolution_width"] = resolution_width
if not_found_asset_names:
joined_asset_names = ", ".join(
["\"{}\"".format(name) for name in not_found_asset_names]
)
self.log.warning((
"Not found asset documents with names \"{}\"."
).format(joined_asset_names))

resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
def fill_latest_versions(self, context):
"""Try to find latest version for each instance's subset.

pixel_aspect = instance.data.get("pixelAspect")
if pixel_aspect:
anatomy_data["pixel_aspect"] = float("{:0.2f}".format(
float(pixel_aspect)))
Key "latestVersion" is always set to latest version or `None`.

fps = instance.data.get("fps")
if fps:
anatomy_data["fps"] = float("{:0.2f}".format(
float(fps)))
Args:
context (pyblish.Context)

instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity
instance.data["anatomyData"] = anatomy_data
instance.data["latestVersion"] = latest_version
# TODO should be version number set here?
instance.data["version"] = version_number
Returns:
None

self.log.info("Instance anatomy Data collected")
self.log.debug(json.dumps(anatomy_data, indent=4))
"""
self.log.debug("Qeurying latest versions for instances.")

hierarchy = {}
subset_names = set()
asset_ids = set()
for instance in context:
# Make sure `"latestVersion"` key is set
latest_version = instance.data.get("latestVersion")
instance.data["latestVersion"] = latest_version

# Skip instances withou "assetEntity"
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
continue

# Store asset ids and subset names for queries
asset_id = asset_doc["_id"]
subset_name = instance.data["subset"]
asset_ids.add(asset_id)
subset_names.add(subset_name)

# Prepare instance hiearchy for faster filling latest versions
if asset_id not in hierarchy:
hierarchy[asset_id] = {}
if subset_name not in hierarchy[asset_id]:
hierarchy[asset_id][subset_name] = []
hierarchy[asset_id][subset_name].append(instance)

subset_docs = list(io.find({
"type": "subset",
"parent": {"$in": list(asset_ids)},
"name": {"$in": list(subset_names)}
}))

subset_ids = [
subset_doc["_id"]
for subset_doc in subset_docs
]

last_version_by_subset_id = self._query_last_versions(subset_ids)
for subset_doc in subset_docs:
subset_id = subset_doc["_id"]
last_version = last_version_by_subset_id.get(subset_id)
if last_version is None:
continue

asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
_instances = hierarchy[asset_id][subset_name]
for _instance in _instances:
_instance.data["latestVersion"] = last_version

def _query_last_versions(self, subset_ids):
"""Retrieve all latest versions for entered subset_ids.

Args:
subset_ids (list): List of subset ids with type `ObjectId`.

Returns:
dict: Key is subset id and value is last version name.
"""
_pipeline = [
# Find all versions of those subsets
{"$match": {
"type": "version",
"parent": {"$in": subset_ids}
}},
# Sorting versions all together
{"$sort": {"name": 1}},
# Group them by "parent", but only take the last
{"$group": {
"_id": "$parent",
"_version_id": {"$last": "$_id"},
"name": {"$last": "$name"}
}}
]

last_version_by_subset_id = {}
for doc in io.aggregate(_pipeline):
subset_id = doc["_id"]
last_version_by_subset_id[subset_id] = doc["name"]

return last_version_by_subset_id

def fill_anatomy_data(self, context):
self.log.debug("Storing anatomy data to instance data.")

project_doc = context.data["projectEntity"]
context_asset_doc = context.data["assetEntity"]

for instance in context:
version_number = instance.data.get("version")
# If version is not specified for instance or context
if version_number is None:
# TODO we should be able to change default version by studio
# preferences (like start with version number `0`)
version_number = 1
# use latest version (+1) if already any exist
latest_version = instance.data["latestVersion"]
if latest_version is not None:
version_number += int(latest_version)

anatomy_updates = {
"asset": instance.data["asset"],
"family": instance.data["family"],
"subset": instance.data["subset"],
"version": version_number
}

# Hiearchy
asset_doc = instance.data.get("assetEntity")
if asset_doc and asset_doc["_id"] != context_asset_doc["_id"]:
parents = asset_doc["data"].get("parents") or list()
anatomy_updates["hierarchy"] = "/".join(parents)

# Task
task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name

# Additional data
resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_updates["resolution_width"] = resolution_width

resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_updates["resolution_height"] = resolution_height

pixel_aspect = instance.data.get("pixelAspect")
if pixel_aspect:
anatomy_updates["pixel_aspect"] = float(
"{:0.2f}".format(float(pixel_aspect))
)

fps = instance.data.get("fps")
if fps:
anatomy_updates["fps"] = float("{:0.2f}".format(float(fps)))

anatomy_data = copy.deepcopy(context.data["anatomyData"])
anatomy_data.update(anatomy_updates)

# Store anatomy data
instance.data["projectEntity"] = project_doc
instance.data["anatomyData"] = anatomy_data
instance.data["version"] = version_number

# Log collected data
instance_name = instance.data["name"]
instance_label = instance.data.get("label")
if instance_label:
instance_name += "({})".format(instance_label)
self.log.debug("Anatomy data for instance {}: {}".format(
instance_name,
json.dumps(anatomy_data, indent=4)
))
@@ -195,11 +195,14 @@ class ExtractBurnin(pype.api.Extractor):
if "delete" in new_repre["tags"]:
new_repre["tags"].remove("delete")

# Update name and outputName to be able have multiple outputs
# Join previous "outputName" with filename suffix
new_name = "_".join([new_repre["outputName"], filename_suffix])
new_repre["name"] = new_name
new_repre["outputName"] = new_name
if len(repre_burnin_defs.keys()) > 1:
# Update name and outputName to be
# able have multiple outputs in case of more burnin presets
# Join previous "outputName" with filename suffix
new_name = "_".join(
[new_repre["outputName"], filename_suffix])
new_repre["name"] = new_name
new_repre["outputName"] = new_name

# Prepare paths and files for process.
self.input_output_paths(new_repre, temp_data, filename_suffix)
@@ -1,7 +1,7 @@
from maya import cmds, mel
import pymel.core as pc

from avalon import api
from avalon import api, io
from avalon.maya.pipeline import containerise
from avalon.maya import lib

@@ -58,6 +58,13 @@ class AudioLoader(api.Loader):
type="string"
)

# Set frame range.
version = io.find_one({"_id": representation["parent"]})
subset = io.find_one({"_id": version["parent"]})
asset = io.find_one({"_id": subset["parent"]})
audio_node.sourceStart.set(1 - asset["data"]["frameStart"])
audio_node.sourceEnd.set(asset["data"]["frameEnd"])

def switch(self, container, representation):
self.update(container, representation)
@@ -1,7 +1,7 @@
import pymel.core as pc
import maya.cmds as cmds

from avalon import api
from avalon import api, io
from avalon.maya.pipeline import containerise
from avalon.maya import lib
from Qt import QtWidgets
@@ -147,6 +147,17 @@ class ImagePlaneLoader(api.Loader):
type="string"
)

# Set frame range.
version = io.find_one({"_id": representation["parent"]})
subset = io.find_one({"_id": version["parent"]})
asset = io.find_one({"_id": subset["parent"]})
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)

def switch(self, container, representation):
self.update(container, representation)
@@ -1,9 +1,11 @@
from .lib import (
    system_settings,
    project_settings
    project_settings,
    environments
)

__all__ = (
    "system_settings",
    "project_settings"
    "project_settings",
    "environments"
)
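With this change the settings package re-exports environments next to the existing helpers. A hedged usage sketch, assuming this hunk is the settings package's __init__ (the file name is not shown in this rendering) and that the helpers are zero-argument callables; neither assumption is confirmed by the diff:

# Hypothetical usage of the re-exported names; call signatures are assumed.
from pype.settings import system_settings, project_settings, environments

env_defaults = environments()
print(type(env_defaults))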
@@ -3,5 +3,6 @@
    "PYTHONPATH": [
        "{PYPE_SETUP_PATH}/repos/avalon-core/setup/blender",
        "{PYTHONPATH}"
    ]
    ],
    "CREATE_NEW_CONSOLE": "yes"
}
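These environment presets lean on {VARIABLE} placeholders that are expanded against the launching environment, and list values behave like path lists. A rough illustration of that expansion idea, deliberately simplified and not pype's actual resolver (recursive and per-platform handling are omitted):

import os


class _KeepUnknown(dict):
    """format_map helper that leaves unknown {PLACEHOLDERS} untouched."""
    def __missing__(self, key):
        return "{" + key + "}"


def expand_env_value(value, env=None):
    """Expand {VAR} placeholders; list values are joined with os.pathsep."""
    env = dict(os.environ if env is None else env)
    if isinstance(value, list):
        return os.pathsep.join(expand_env_value(item, env) for item in value)
    return value.format_map(_KeepUnknown(env))


example = ["{PYPE_SETUP_PATH}/repos/avalon-core/setup/blender", "{PYTHONPATH}"]
print(expand_env_value(example, {"PYPE_SETUP_PATH": "/opt/pype-setup",
                                 "PYTHONPATH": "/already/on/path"}))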
@@ -6,21 +6,9 @@
    "PYPE_PROJECT_PLUGINS": "",
    "STUDIO_SOFT": "{PYP_SETUP_ROOT}/soft",
    "FFMPEG_PATH": {
        "windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/windows/bin",
        "darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/darwin/bin",
        "linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/linux"
    },
    "DJV_PATH": {
        "windows": [
            "C:/Program Files/djv-1.1.0-Windows-64/bin/djv_view.exe",
            "C:/Program Files/DJV/bin/djv_view.exe",
            "{STUDIO_SOFT}/djv/windows/bin/djv_view.exe"
        ],
        "linux": [
            "usr/local/djv/djv_view",
            "{STUDIO_SOFT}/djv/linux/bin/djv_view"
        ],
        "darwin": "Application/DJV.app/Contents/MacOS/DJV"
        "windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/windows/bin",
        "darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/darwin/bin",
        "linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/linux"
    },
    "PATH": [
        "{PYPE_CONFIG}/launchers",
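FFMPEG_PATH above is keyed by platform, with Windows entries separated by ";" and the POSIX platforms by ":". A rough sketch of selecting the entry for the running platform and splitting it into candidate directories; the resolved paths are hypothetical and this is not pype's own lookup code:

import sys

# Hypothetical, already-expanded values mirroring the structure above.
FFMPEG_PATH = {
    "windows": "C:/venv/localized/ffmpeg_exec/windows/bin;C:/pype-setup/vendor/bin/ffmpeg_exec/windows/bin",
    "darwin": "/venv/localized/ffmpeg_exec/darwin/bin:/pype-setup/vendor/bin/ffmpeg_exec/darwin/bin",
    "linux": "/venv/localized/ffmpeg_exec/linux:/pype-setup/vendor/bin/ffmpeg_exec/linux"
}


def ffmpeg_search_dirs(paths_by_platform):
    """Return candidate ffmpeg directories for the current platform."""
    key = {"win32": "windows", "darwin": "darwin"}.get(sys.platform, "linux")
    separator = ";" if key == "windows" else ":"
    return paths_by_platform[key].split(separator)


print(ffmpeg_search_dirs(FFMPEG_PATH))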
pype/settings/defaults/environments/photoshop.json (Normal file, 7 lines)
@@ -0,0 +1,7 @@
{
    "AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH": "1",
    "PYTHONPATH": "{PYTHONPATH}",
    "PYPE_LOG_NO_COLORS": "Yes",
    "WEBSOCKET_URL": "ws://localhost:8099/ws/",
    "WORKFILES_SAVE_AS": "Yes"
}
pype/settings/defaults/launchers/blender_2.80.toml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
application_dir = "blender"
executable = "blender_2.80"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.80"
ftrack_label = "Blender"
icon = "app_icons/blender.png"
ftrack_icon = "{}/app_icons/blender.png"
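The launcher definitions that follow are plain TOML application descriptors (label, executable, optional [copy] and [environment] tables). A quick way to inspect one of them, assuming the third-party toml package is available; the path is just an example taken from this diff:

import toml  # third-party package, assumed to be installed

# Example path taken from the files added in this diff.
path = "pype/settings/defaults/launchers/blender_2.80.toml"

with open(path) as stream:
    launcher = toml.load(stream)

print(launcher["label"], launcher.get("label_variant"))
print(launcher.get("environment", {}))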
pype/settings/defaults/launchers/blender_2.81.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.81"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.81"
icon = "app_icons/blender.png"

ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

pype/settings/defaults/launchers/blender_2.82.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.82"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.82"
icon = "app_icons/blender.png"

ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

pype/settings/defaults/launchers/blender_2.83.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.83"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.83"
icon = "app_icons/blender.png"

ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

pype/settings/defaults/launchers/celaction_local.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
executable = "celaction_local"
schema = "avalon-core:application-1.0"
application_dir = "celaction"
label = "CelAction2D"
icon = "app_icons/celaction_local.png"
launch_hook = "pype/hooks/celaction/prelaunch.py/CelactionPrelaunchHook"

ftrack_label = "CelAction2D"
ftrack_icon = '{}/app_icons/celaction_local.png'

pype/settings/defaults/launchers/celaction_publish.toml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
schema = "avalon-core:application-1.0"
application_dir = "shell"
executable = "celaction_publish"
label = "Celaction Shell"
icon = "app_icons/celaction.png"

[environment]
CREATE_NEW_CONSOLE = "Yes"

pype/settings/defaults/launchers/darwin/blender_2.82 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
open -a blender $@

pype/settings/defaults/launchers/darwin/harmony_17 (Normal file, 9 lines)
@@ -0,0 +1,9 @@
#!/usr/bin/env bash
DIRNAME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
set >~/environment.tmp
if [ $? -ne -0 ] ; then
    echo "ERROR: cannot write to '~/environment.tmp'!"
    read -n 1 -s -r -p "Press any key to exit"
    return
fi
open -a Terminal.app "$DIRNAME/harmony_17_launch"

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
source ~/environment.tmp
export $(cut -d= -f1 ~/environment.tmp)
exe="/Applications/Toon Boom Harmony 17 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium"
$PYPE_PYTHON_EXE -c "import avalon.harmony;avalon.harmony.launch('$exe')"

pype/settings/defaults/launchers/darwin/python3 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
open /usr/bin/python3 --args $@

pype/settings/defaults/launchers/harmony_17.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
application_dir = "harmony"
label = "Harmony"
label_variant = "17"
ftrack_label = "Harmony"
schema = "avalon-core:application-1.0"
executable = "harmony_17"
description = ""
icon = "app_icons/harmony.png"
ftrack_icon = '{}/app_icons/harmony.png'

pype/settings/defaults/launchers/houdini_16.toml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
executable = "houdini_16"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "16"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'

pype/settings/defaults/launchers/houdini_17.toml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
executable = "houdini_17"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "17"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'

pype/settings/defaults/launchers/houdini_18.toml (Normal file, 8 lines)
@@ -0,0 +1,8 @@
executable = "houdini_18"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "18"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'
pype/settings/defaults/launchers/linux/maya2016 (Normal file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

maya_path = "/usr/autodesk/maya2016/bin/maya"

if [[ -z $PYPE_LOG_NO_COLORS ]]; then
    $maya_path -file "$AVALON_LAST_WORKFILE" $@
else
    $maya_path $@

pype/settings/defaults/launchers/linux/maya2017 (Normal file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

maya_path = "/usr/autodesk/maya2017/bin/maya"

if [[ -z $AVALON_LAST_WORKFILE ]]; then
    $maya_path -file "$AVALON_LAST_WORKFILE" $@
else
    $maya_path $@

pype/settings/defaults/launchers/linux/maya2018 (Normal file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

maya_path = "/usr/autodesk/maya2018/bin/maya"

if [[ -z $AVALON_LAST_WORKFILE ]]; then
    $maya_path -file "$AVALON_LAST_WORKFILE" $@
else
    $maya_path $@

pype/settings/defaults/launchers/linux/maya2019 (Normal file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

maya_path = "/usr/autodesk/maya2019/bin/maya"

if [[ -z $AVALON_LAST_WORKFILE ]]; then
    $maya_path -file "$AVALON_LAST_WORKFILE" $@
else
    $maya_path $@

pype/settings/defaults/launchers/linux/maya2020 (Normal file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

maya_path = "/usr/autodesk/maya2020/bin/maya"

if [[ -z $AVALON_LAST_WORKFILE ]]; then
    $maya_path -file "$AVALON_LAST_WORKFILE" $@
else
    $maya_path $@
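The Linux Maya launchers above share one intent: open the last known workfile when AVALON_LAST_WORKFILE is set, otherwise start Maya without a file. As rendered they look incomplete (spaces around the shell assignment, no closing fi, and the 2016 variant tests PYPE_LOG_NO_COLORS with -z where its siblings test the workfile variable), so treat them as an outline rather than finished scripts. A minimal Python sketch of the same decision, purely illustrative and not the launch mechanism pype actually uses:

import os


def build_maya_command(maya_path, extra_args=None):
    """Open the last workfile when AVALON_LAST_WORKFILE is set, else start empty."""
    args = [maya_path]
    last_workfile = os.environ.get("AVALON_LAST_WORKFILE")
    if last_workfile:
        args += ["-file", last_workfile]
    return args + list(extra_args or [])


# Hypothetical invocation; the executable path mirrors the launcher scripts above.
print(build_maya_command("/usr/autodesk/maya2020/bin/maya"))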
pype/settings/defaults/launchers/linux/nuke11.3 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke11.3v5/Nuke11.3'

pype/settings/defaults/launchers/linux/nuke12.0 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke12.0v1/Nuke12.0'

pype/settings/defaults/launchers/linux/nukestudio11.3 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke11.3v5/Nuke11.3 --studio'

pype/settings/defaults/launchers/linux/nukestudio12.0 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke12.0v1/Nuke12.0 --studio'

pype/settings/defaults/launchers/linux/nukex11.3 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke11.3v5/Nuke11.3 -nukex'

pype/settings/defaults/launchers/linux/nukex12.0 (Normal file, 2 lines)
@@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke12.0v1/Nuke12.0 -nukex'
pype/settings/defaults/launchers/maya_2016.toml (Normal file, 27 lines)
@@ -0,0 +1,27 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya"
label_variant = "2016"
ftrack_label = "Maya"
schema = "avalon-core:application-1.0"
executable = "maya2016"
description = ""
icon = "app_icons/maya.png"
ftrack_icon = '{}/app_icons/maya.png'

[copy]
"{PYPE_MODULE_ROOT}/pype/resources/maya/workspace.mel" = "workspace.mel"

[environment]
MAYA_DISABLE_CLIC_IPM = "Yes"  # Disable the AdSSO process
MAYA_DISABLE_CIP = "Yes"  # Shorten time to boot
MAYA_DISABLE_CER = "Yes"
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/maya_2017.toml (Normal file, 29 lines)
@@ -0,0 +1,29 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya"
label_variant = "2017"
ftrack_label = "Maya"
schema = "avalon-core:application-1.0"
executable = "maya2017"
description = ""
icon = "app_icons/maya.png"
ftrack_icon = '{}/app_icons/maya.png'

[copy]
"{PYPE_MODULE_ROOT}/pype/resources/maya/workspace.mel" = "workspace.mel"

[environment]
MAYA_DISABLE_CLIC_IPM = "Yes"  # Disable the AdSSO process
MAYA_DISABLE_CIP = "Yes"  # Shorten time to boot
MAYA_DISABLE_CER = "Yes"
PYMEL_SKIP_MEL_INIT = "Yes"
LC_ALL= "C"  # Mute color management warnings
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]
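The [environment] tables in these launcher descriptors mix plain strings and lists; path-like lists such as PYTHONPATH get joined into a single value. A rough sketch of merging such a table into a copy of the current environment, shown only to illustrate the data shape (placeholder expansion is left out, and this is not pype's actual launch code):

import os


def merge_launcher_environment(environment_table, base_env=None):
    """Return a new env dict with the launcher's [environment] table applied."""
    env = dict(os.environ if base_env is None else base_env)
    for key, value in environment_table.items():
        if isinstance(value, list):
            # Path-like lists (e.g. PYTHONPATH) are joined with the OS separator.
            value = os.pathsep.join(str(item) for item in value)
        env[key] = str(value)
    return env


# Example mirroring the maya_2017.toml [environment] table above.
environment = {
    "MAYA_DISABLE_CLIC_IPM": "Yes",
    "MAYA_DISABLE_CIP": "Yes",
    "MAYA_DISABLE_CER": "Yes",
    "PYMEL_SKIP_MEL_INIT": "Yes",
    "LC_ALL": "C",
    "PYTHONPATH": ["{AVALON_CORE}/setup/maya", "{PYTHONPATH}"],
}
merged = merge_launcher_environment(environment)
print(merged["PYTHONPATH"])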
pype/settings/defaults/launchers/maya_2018.toml (Normal file, 15 lines)
@@ -0,0 +1,15 @@
application_dir = "maya"
default_dirs = [
    "renders"
]
label = "Autodesk Maya"
label_variant = "2018"
ftrack_label = "Maya"
schema = "avalon-core:application-1.0"
executable = "maya2018"
description = ""
icon = "app_icons/maya.png"
ftrack_icon = '{}/app_icons/maya.png'

[copy]
"{PYPE_MODULE_ROOT}/pype/resources/maya/workspace.mel" = "workspace.mel"

pype/settings/defaults/launchers/maya_2019.toml (Normal file, 15 lines)
@@ -0,0 +1,15 @@
application_dir = "maya"
default_dirs = [
    "renders"
]
label = "Autodesk Maya"
label_variant = "2019"
ftrack_label = "Maya"
schema = "avalon-core:application-1.0"
executable = "maya2019"
description = ""
icon = "app_icons/maya.png"
ftrack_icon = '{}/app_icons/maya.png'

[copy]
"{PYPE_MODULE_ROOT}/pype/resources/maya/workspace.mel" = "workspace.mel"

pype/settings/defaults/launchers/maya_2020.toml (Normal file, 15 lines)
@@ -0,0 +1,15 @@
application_dir = "maya"
default_dirs = [
    "renders"
]
label = "Autodesk Maya"
label_variant = "2020"
ftrack_label = "Maya"
schema = "avalon-core:application-1.0"
executable = "maya2020"
description = ""
icon = "app_icons/maya.png"
ftrack_icon = '{}/app_icons/maya.png'

[copy]
"{PYPE_MODULE_ROOT}/pype/resources/maya/workspace.mel" = "workspace.mel"

pype/settings/defaults/launchers/mayabatch_2019.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2019x64"
schema = "avalon-core:application-1.0"
executable = "mayabatch2019"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayabatch_2020.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2020x64"
schema = "avalon-core:application-1.0"
executable = "mayabatch2020"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayapy2016.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2016x64"
schema = "avalon-core:application-1.0"
executable = "mayapy2016"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayapy2017.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2017x64"
schema = "avalon-core:application-1.0"
executable = "mayapy2017"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayapy2018.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2018x64"
schema = "avalon-core:application-1.0"
executable = "mayapy2017"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayapy2019.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2019x64"
schema = "avalon-core:application-1.0"
executable = "mayapy2019"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]

pype/settings/defaults/launchers/mayapy2020.toml (Normal file, 17 lines)
@@ -0,0 +1,17 @@
application_dir = "maya"
default_dirs = [
    "scenes",
    "data",
    "renderData/shaders",
    "images"
]
label = "Autodesk Maya 2020x64"
schema = "avalon-core:application-1.0"
executable = "mayapy2020"
description = ""

[environment]
PYTHONPATH = [
    "{AVALON_CORE}/setup/maya",
    "{PYTHONPATH}"
]
Some files were not shown because too many files have changed in this diff.