Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit d6c6f495cd
Merge branch '2.x/develop' into feature/567-Nuke_Publish_Camera
52 changed files with 1096 additions and 413 deletions
@@ -1,5 +1,6 @@
 import os
 import sys
+from uuid import uuid4

 from avalon import api, io, harmony
 from avalon.vendor import Qt
@@ -8,8 +9,11 @@ import pyblish.api
 from pype import lib


+signature = str(uuid4())
+
+
 def set_scene_settings(settings):
-    func = """function func(args)
+    func = """function %s_func(args)
     {
         if (args[0]["fps"])
         {
@@ -36,8 +40,8 @@ def set_scene_settings(settings):
             )
         }
     }
-    func
-    """
+    %s_func
+    """ % (signature, signature)
     harmony.send({"function": func, "args": [settings]})


@@ -107,15 +111,15 @@ def check_inventory():
            outdated_containers.append(container)

    # Colour nodes.
-    func = """function func(args){
+    func = """function %s_func(args){
    for( var i =0; i <= args[0].length - 1; ++i)
    {
        var red_color = new ColorRGBA(255, 0, 0, 255);
        node.setColor(args[0][i], red_color);
    }
    }
-    func
-    """
+    %s_func
+    """ % (signature, signature)
    outdated_nodes = []
    for container in outdated_containers:
        if container["loader"] == "ImageSequenceLoader":
@@ -144,7 +148,7 @@ def application_launch():


 def export_template(backdrops, nodes, filepath):
-    func = """function func(args)
+    func = """function %s_func(args)
    {

        var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
@@ -179,8 +183,8 @@ def export_template(backdrops, nodes, filepath):
        Action.perform("onActionUpToParent()", "Node View");
        node.deleteNode(template_group, true, true);
    }
-    func
-    """
+    %s_func
+    """ % (signature, signature)
    harmony.send({
        "function": func,
        "args": [
@@ -221,12 +225,15 @@ def install():

 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle node enabling on instance toggles."""
-    func = """function func(args)
+    func = """function %s_func(args)
    {
        node.setEnable(args[0], args[1])
    }
-    func
-    """
-    harmony.send(
-        {"function": func, "args": [instance[0], new_value]}
-    )
+    %s_func
+    """ % (signature, signature)
+    try:
+        harmony.send(
+            {"function": func, "args": [instance[0], new_value]}
+        )
+    except IndexError:
+        print(f"Instance '{instance}' is missing node")
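Note: every harmony.send payload in this file now wraps its JavaScript in a function whose name embeds a module-level uuid4 signature, presumably so repeated sends cannot clash on a shared global function name in Harmony. A rough sketch of the recurring pattern (the helper itself is illustrative, not part of the diff):

    from uuid import uuid4

    signature = str(uuid4())  # module-level, as in the diff


    def namespaced(js_body):
        # Illustrative helper: define "<signature>_func" and leave the
        # function name as the last expression so Harmony evaluates it.
        return """function %s_func(args)
    {
    %s
    }
    %s_func
    """ % (signature, js_body, signature)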
@@ -32,8 +32,19 @@ def deferred():
            command=lambda *args: BuildWorkfile().process()
        )

+    def add_look_assigner_item():
+        import mayalookassigner
+        cmds.menuItem(
+            "Look assigner",
+            parent=pipeline._menu,
+            command=lambda *args: mayalookassigner.show()
+        )
+
    log.info("Attempting to install scripts menu..")

+    add_build_workfiles_item()
+    add_look_assigner_item()
+
    try:
        import scriptsmenu.launchformaya as launchformaya
        import scriptsmenu.scriptsmenu as scriptsmenu
@@ -42,7 +53,6 @@ def deferred():
            "Skipping studio.menu install, because "
            "'scriptsmenu' module seems unavailable."
        )
-        add_build_workfiles_item()
        return

    # load configuration of custom menu
@@ -71,8 +71,8 @@ def add_tags_from_presets():
    # Get project task types.
    tasks = io.find_one({"type": "project"})["config"]["tasks"]
    nks_pres_tags["[Tasks]"] = {}
-    for task in tasks:
-        nks_pres_tags["[Tasks]"][task["name"]] = {
+    for task_type in tasks.keys():
+        nks_pres_tags["[Tasks]"][task_type] = {
            "editable": "1",
            "note": "",
            "icon": {
@@ -1,16 +1,27 @@
-from Qt import QtWidgets
-from avalon.tools import libraryloader
 from pype.api import Logger
-from pype.tools.launcher import LauncherWindow, actions


 class AvalonApps:
     def __init__(self, main_parent=None, parent=None):
         self.log = Logger().get_logger(__name__)
+        self.main_parent = main_parent
+
+        self.tray_init(main_parent, parent)
+
+    def tray_init(self, main_parent, parent):
+        from avalon.tools.libraryloader import app
+        from avalon import style
+        from pype.tools.launcher import LauncherWindow, actions

         self.parent = parent
         self.main_parent = main_parent

         self.app_launcher = LauncherWindow()
+        self.libraryloader = app.Window(
+            icon=self.parent.icon,
+            show_projects=True,
+            show_libraries=True
+        )
+        self.libraryloader.setStyleSheet(style.load_stylesheet())

         # actions.register_default_actions()
         actions.register_config_actions()
@@ -23,6 +34,7 @@ class AvalonApps:

     # Definition of Tray menu
     def tray_menu(self, parent_menu=None):
+        from Qt import QtWidgets
         # Actions
         if parent_menu is None:
             if self.parent is None:
@@ -52,9 +64,11 @@ class AvalonApps:
         self.app_launcher.activateWindow()

     def show_library_loader(self):
-        libraryloader.show(
-            parent=self.main_parent,
-            icon=self.parent.icon,
-            show_projects=True,
-            show_libraries=True
-        )
+        self.libraryloader.show()
+
+        # Raise and activate the window
+        # for MacOS
+        self.libraryloader.raise_()
+        # for Windows
+        self.libraryloader.activateWindow()
+        self.libraryloader.refresh()
@@ -1,9 +1,8 @@
 import os
 import threading
+import time

 from pype.api import Logger
-from avalon import style
-from Qt import QtWidgets
-from .widgets import ClockifySettings, MessageWidget
 from .clockify_api import ClockifyAPI
 from .constants import CLOCKIFY_FTRACK_USER_PATH


@@ -17,11 +16,21 @@ class ClockifyModule:

         os.environ["CLOCKIFY_WORKSPACE"] = self.workspace_name

+        self.timer_manager = None
+        self.MessageWidgetClass = None
+
+        self.clockapi = ClockifyAPI(master_parent=self)
+
         self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
+        self.tray_init(main_parent, parent)
+
+    def tray_init(self, main_parent, parent):
+        from .widgets import ClockifySettings, MessageWidget
+
+        self.MessageWidgetClass = MessageWidget

         self.main_parent = main_parent
         self.parent = parent
-        self.clockapi = ClockifyAPI(master_parent=self)
         self.message_widget = None
         self.widget_settings = ClockifySettings(main_parent, self)
         self.widget_settings_required = None
@@ -57,11 +66,10 @@ class ClockifyModule:
             )

         if 'AvalonApps' in modules:
-            from launcher import lib
-            actions_path = os.path.sep.join([
+            actions_path = os.path.join(
                 os.path.dirname(__file__),
                 'launcher_actions'
-            ])
+            )
             current = os.environ.get('AVALON_ACTIONS', '')
             if current:
                 current += os.pathsep
@@ -78,12 +86,12 @@ class ClockifyModule:
         self.stop_timer()

     def timer_started(self, data):
-        if hasattr(self, 'timer_manager'):
+        if self.timer_manager:
             self.timer_manager.start_timers(data)

     def timer_stopped(self):
         self.bool_timer_run = False
-        if hasattr(self, 'timer_manager'):
+        if self.timer_manager:
             self.timer_manager.stop_timers()

     def start_timer_check(self):
@@ -102,7 +110,7 @@ class ClockifyModule:
         self.thread_timer_check = None

     def check_running(self):
-        import time
+
         while self.bool_thread_check_running is True:
             bool_timer_run = False
             if self.clockapi.get_in_progress() is not None:
@@ -156,15 +164,14 @@ class ClockifyModule:
         self.timer_stopped()

     def signed_in(self):
-        if hasattr(self, 'timer_manager'):
-            if not self.timer_manager:
-                return
+        if not self.timer_manager:
+            return

-            if not self.timer_manager.last_task:
-                return
+        if not self.timer_manager.last_task:
+            return

-            if self.timer_manager.is_running:
-                self.start_timer_manager(self.timer_manager.last_task)
+        if self.timer_manager.is_running:
+            self.start_timer_manager(self.timer_manager.last_task)

     def start_timer(self, input_data):
         # If not api key is not entered then skip
@@ -197,11 +204,14 @@ class ClockifyModule:
             "<br><br>Please inform your Project Manager."
         ).format(project_name, str(self.clockapi.workspace_name))

-        self.message_widget = MessageWidget(
-            self.main_parent, msg, "Clockify - Info Message"
-        )
-        self.message_widget.closed.connect(self.on_message_widget_close)
-        self.message_widget.show()
+        if self.MessageWidgetClass:
+            self.message_widget = self.MessageWidgetClass(
+                self.main_parent, msg, "Clockify - Info Message"
+            )
+            self.message_widget.closed.connect(
+                self.on_message_widget_close
+            )
+            self.message_widget.show()

         return

@@ -227,31 +237,29 @@ class ClockifyModule:
     # Definition of Tray menu
     def tray_menu(self, parent_menu):
         # Menu for Tray App
-        self.menu = QtWidgets.QMenu('Clockify', parent_menu)
-        self.menu.setProperty('submenu', 'on')
-        self.menu.setStyleSheet(style.load_stylesheet())
+        from Qt import QtWidgets
+        menu = QtWidgets.QMenu("Clockify", parent_menu)
+        menu.setProperty("submenu", "on")

         # Actions
-        self.aShowSettings = QtWidgets.QAction(
-            "Settings", self.menu
-        )
-        self.aStopTimer = QtWidgets.QAction(
-            "Stop timer", self.menu
-        )
+        action_show_settings = QtWidgets.QAction("Settings", menu)
+        action_stop_timer = QtWidgets.QAction("Stop timer", menu)

-        self.menu.addAction(self.aShowSettings)
-        self.menu.addAction(self.aStopTimer)
+        menu.addAction(action_show_settings)
+        menu.addAction(action_stop_timer)

-        self.aShowSettings.triggered.connect(self.show_settings)
-        self.aStopTimer.triggered.connect(self.stop_timer)
+        action_show_settings.triggered.connect(self.show_settings)
+        action_stop_timer.triggered.connect(self.stop_timer)
+
+        self.action_stop_timer = action_stop_timer

         self.set_menu_visibility()

-        parent_menu.addMenu(self.menu)
+        parent_menu.addMenu(menu)

     def show_settings(self):
         self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
         self.widget_settings.show()

     def set_menu_visibility(self):
-        self.aStopTimer.setVisible(self.bool_timer_run)
+        self.action_stop_timer.setVisible(self.bool_timer_run)
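Note: the rewritten Clockify tray_menu keeps widget references as locals, storing only the one action it must reach again later (self.action_stop_timer, used by set_menu_visibility). In Qt, an object survives as long as a parent or a Python reference holds it, so actions parented to the menu stay alive while only the state-bearing handle is kept on self. An illustrative reduction of the pattern (names invented):

    from Qt import QtWidgets


    def tray_menu(self, parent_menu):
        menu = QtWidgets.QMenu("Example", parent_menu)

        action = QtWidgets.QAction("Stop timer", menu)
        menu.addAction(action)
        action.triggered.connect(self.stop_timer)

        # Keep a handle only to what must be toggled later.
        self.action_stop_timer = action
        parent_menu.addMenu(menu)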
@@ -30,7 +30,7 @@ class ClockifySync(api.Action):

        projects_info = {}
        for project in projects_to_sync:
-            task_types = [task['name'] for task in project['config']['tasks']]
+            task_types = project['config']['tasks'].keys()
            projects_info[project['name']] = task_types

        clockify_projects = self.clockapi.get_projects()
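Note: this action and the NukeStudio tags change above follow the same schema shift made throughout this commit: project['config']['tasks'] moves from a list of {"name": ...} dicts to a mapping keyed by task type, so consumers switch from list comprehensions to .keys(). Roughly (sample data invented):

    # Old schema: list of dicts
    tasks_old = [{"name": "Animation"}, {"name": "Modeling"}]
    task_types_old = [task["name"] for task in tasks_old]

    # New schema: mapping keyed by task type, with room for metadata
    tasks_new = {
        "Animation": {"short_name": "anim"},
        "Modeling": {"short_name": "model"},
    }
    task_types_new = list(tasks_new.keys())

    assert task_types_old == task_types_new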
@@ -1,2 +1,12 @@
-from .lib import *
+from . import ftrack_server
+from .ftrack_server import FtrackServer, check_ftrack_url
+from .lib import BaseHandler, BaseEvent, BaseAction
+
+__all__ = (
+    "ftrack_server",
+    "FtrackServer",
+    "check_ftrack_url",
+    "BaseHandler",
+    "BaseEvent",
+    "BaseAction"
+)
@@ -1,2 +1,8 @@
 from .ftrack_server import FtrackServer
 from .lib import check_ftrack_url
+
+
+__all__ = (
+    "FtrackServer",
+    "check_ftrack_url"
+)
@@ -16,9 +16,9 @@ import pymongo
 from pype.api import decompose_url


-class NotActiveTable(Exception):
+class NotActiveCollection(Exception):
     def __init__(self, *args, **kwargs):
-        msg = "Active table is not set. (This is bug)"
+        msg = "Active collection is not set. (This is bug)"
         if not (args or kwargs):
             args = [msg]
         super().__init__(*args, **kwargs)
@@ -40,12 +40,12 @@ def auto_reconnect(func):
     return decorated


-def check_active_table(func):
+def check_active_collection(func):
+    """Check if CustomDbConnector has active collection."""
     @functools.wraps(func)
     def decorated(obj, *args, **kwargs):
-        if not obj.active_table:
-            raise NotActiveTable()
+        if not obj.active_collection:
+            raise NotActiveCollection()
         return func(obj, *args, **kwargs)
     return decorated

@@ -55,7 +55,7 @@ class CustomDbConnector:
     timeout = int(os.environ["AVALON_TIMEOUT"])

     def __init__(
-        self, uri, database_name, port=None, table_name=None
+        self, uri, database_name, port=None, collection_name=None
     ):
         self._mongo_client = None
         self._sentry_client = None
@@ -76,10 +76,10 @@ class CustomDbConnector:
         self._port = port
         self._database_name = database_name

-        self.active_table = table_name
+        self.active_collection = collection_name

     def __getitem__(self, key):
-        # gives direct access to collection withou setting `active_table`
+        # gives direct access to collection withou setting `active_collection`
         return self._database[key]

     def __getattribute__(self, attr):
@@ -88,9 +88,11 @@ class CustomDbConnector:
         try:
             return super(CustomDbConnector, self).__getattribute__(attr)
         except AttributeError:
-            if self.active_table is None:
-                raise NotActiveTable()
-            return self._database[self.active_table].__getattribute__(attr)
+            if self.active_collection is None:
+                raise NotActiveCollection()
+            return self._database[self.active_collection].__getattribute__(
+                attr
+            )

     def install(self):
         """Establish a persistent connection to the database"""
@@ -146,46 +148,30 @@ class CustomDbConnector:
         self._is_installed = False
         atexit.unregister(self.uninstall)

-    def create_table(self, name, **options):
-        if self.exist_table(name):
+    def collection_exists(self, collection_name):
+        return collection_name in self.collections()
+
+    def create_collection(self, name, **options):
+        if self.collection_exists(name):
             return

         return self._database.create_collection(name, **options)

-    def exist_table(self, table_name):
-        return table_name in self.tables()
-
-    def create_table(self, name, **options):
-        if self.exist_table(name):
-            return
-
-        return self._database.create_collection(name, **options)
-
-    def exist_table(self, table_name):
-        return table_name in self.tables()
-
-    def tables(self):
-        """List available tables
-        Returns:
-            list of table names
-        """
-        collection_names = self.collections()
-        for table_name in collection_names:
-            if table_name in ("system.indexes",):
-                continue
-            yield table_name
-
     @auto_reconnect
     def collections(self):
-        return self._database.collection_names()
+        for col_name in self._database.collection_names():
+            if col_name not in ("system.indexes",):
+                yield col_name

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def insert_one(self, item, **options):
         assert isinstance(item, dict), "item must be of type <dict>"
-        return self._database[self.active_table].insert_one(item, **options)
+        return self._database[self.active_collection].insert_one(
+            item, **options
+        )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def insert_many(self, items, ordered=True, **options):
         # check if all items are valid
@@ -194,72 +180,74 @@ class CustomDbConnector:
             assert isinstance(item, dict), "`item` must be of type <dict>"

         options["ordered"] = ordered
-        return self._database[self.active_table].insert_many(items, **options)
+        return self._database[self.active_collection].insert_many(
+            items, **options
+        )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def find(self, filter, projection=None, sort=None, **options):
         options["sort"] = sort
-        return self._database[self.active_table].find(
+        return self._database[self.active_collection].find(
             filter, projection, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def find_one(self, filter, projection=None, sort=None, **options):
         assert isinstance(filter, dict), "filter must be <dict>"
         options["sort"] = sort
-        return self._database[self.active_table].find_one(
+        return self._database[self.active_collection].find_one(
             filter,
             projection,
             **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def replace_one(self, filter, replacement, **options):
-        return self._database[self.active_table].replace_one(
+        return self._database[self.active_collection].replace_one(
             filter, replacement, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def update_one(self, filter, update, **options):
-        return self._database[self.active_table].update_one(
+        return self._database[self.active_collection].update_one(
             filter, update, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def update_many(self, filter, update, **options):
-        return self._database[self.active_table].update_many(
+        return self._database[self.active_collection].update_many(
             filter, update, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def distinct(self, **options):
-        return self._database[self.active_table].distinct(**options)
+        return self._database[self.active_collection].distinct(**options)

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def drop_collection(self, name_or_collection, **options):
-        return self._database[self.active_table].drop(
+        return self._database[self.active_collection].drop(
             name_or_collection, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def delete_one(self, filter, collation=None, **options):
         options["collation"] = collation
-        return self._database[self.active_table].delete_one(
+        return self._database[self.active_collection].delete_one(
             filter, **options
         )

-    @check_active_table
+    @check_active_collection
     @auto_reconnect
     def delete_many(self, filter, collation=None, **options):
         options["collation"] = collation
-        return self._database[self.active_table].delete_many(
+        return self._database[self.active_collection].delete_many(
             filter, **options
         )
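Note: the renamed guard is a plain functools.wraps decorator that checks connector state before delegating; listed above @auto_reconnect, it runs first. A condensed, runnable sketch of the pattern (the Connector class here is a stand-in, not the real CustomDbConnector):

    import functools


    class NotActiveCollection(Exception):
        pass


    def check_active_collection(func):
        # Fail fast when no collection is selected on the connector.
        @functools.wraps(func)
        def decorated(obj, *args, **kwargs):
            if not obj.active_collection:
                raise NotActiveCollection()
            return func(obj, *args, **kwargs)
        return decorated


    class Connector:
        active_collection = None

        @check_active_collection
        def find_one(self, filter):
            return {"filter": filter}


    Connector().find_one({})  # raises NotActiveCollection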
@@ -26,7 +26,7 @@ from pype.api import (
     compose_url
 )

-from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
+from .custom_db_connector import CustomDbConnector


 TOPIC_STATUS_SERVER = "pype.event.server.status"
@@ -153,9 +153,9 @@ class StorerEventHub(SocketBaseEventHub):
 class ProcessEventHub(SocketBaseEventHub):

     hearbeat_msg = b"processor"
-    uri, port, database, table_name = get_ftrack_event_mongo_info()
+    uri, port, database, collection_name = get_ftrack_event_mongo_info()

-    is_table_created = False
+    is_collection_created = False
     pypelog = Logger().get_logger("Session Processor")

     def __init__(self, *args, **kwargs):
@@ -163,7 +163,7 @@ class ProcessEventHub(SocketBaseEventHub):
             self.uri,
             self.database,
             self.port,
-            self.table_name
+            self.collection_name
         )
         super(ProcessEventHub, self).__init__(*args, **kwargs)

@@ -184,7 +184,7 @@ class ProcessEventHub(SocketBaseEventHub):
                 "Error with Mongo access, probably permissions."
                 "Check if exist database with name \"{}\""
                 " and collection \"{}\" inside."
-            ).format(self.database, self.table_name))
+            ).format(self.database, self.collection_name))
             self.sock.sendall(b"MongoError")
             sys.exit(0)

@@ -12,7 +12,9 @@ from pype.modules.ftrack.ftrack_server.lib import (
     get_ftrack_event_mongo_info,
     TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
 )
-from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
+from pype.modules.ftrack.ftrack_server.custom_db_connector import (
+    CustomDbConnector
+)
 from pype.api import Logger

 log = Logger().get_logger("Event storer")
@@ -23,8 +25,8 @@ class SessionFactory:
     session = None


-uri, port, database, table_name = get_ftrack_event_mongo_info()
-dbcon = CustomDbConnector(uri, database, port, table_name)
+uri, port, database, collection_name = get_ftrack_event_mongo_info()
+dbcon = CustomDbConnector(uri, database, port, collection_name)

 # ignore_topics = ["ftrack.meta.connected"]
 ignore_topics = []
@@ -200,7 +202,7 @@ def main(args):
             "Error with Mongo access, probably permissions."
             "Check if exist database with name \"{}\""
             " and collection \"{}\" inside."
-        ).format(database, table_name))
+        ).format(database, collection_name))
         sock.sendall(b"MongoError")

     finally:
@@ -16,6 +16,7 @@ from bson.objectid import ObjectId
 from bson.errors import InvalidId
 from pymongo import UpdateOne
 import ftrack_api
+from pype.api import config


 log = Logger().get_logger(__name__)
@@ -23,9 +24,9 @@ log = Logger().get_logger(__name__)

 # Current schemas for avalon types
 EntitySchemas = {
-    "project": "avalon-core:project-2.0",
+    "project": "avalon-core:project-2.1",
     "asset": "avalon-core:asset-3.0",
-    "config": "avalon-core:config-1.0"
+    "config": "avalon-core:config-1.1"
 }

 # Group name of custom attributes
@@ -50,7 +51,7 @@ def check_regex(name, entity_type, in_schema=None, schema_patterns=None):
     if in_schema:
         schema_name = in_schema
     elif entity_type == "project":
-        schema_name = "project-2.0"
+        schema_name = "project-2.1"
     elif entity_type == "task":
         schema_name = "task"

@@ -103,6 +104,14 @@ def get_pype_attr(session, split_hierarchical=True):


 def from_dict_to_set(data):
+    """
+    Converts 'data' into $set part of MongoDB update command.
+    Args:
+        data: (dictionary) - up-to-date data from Ftrack
+
+    Returns:
+        (dictionary) - { "$set" : "{..}"}
+    """
     result = {"$set": {}}
     dict_queue = queue.Queue()
     dict_queue.put((None, data))
@@ -114,7 +123,8 @@ def from_dict_to_set(data):
         if _key is not None:
             new_key = "{}.{}".format(_key, key)

-        if not isinstance(value, dict):
+        if not isinstance(value, dict) or \
+                (isinstance(value, dict) and not bool(value)):  # empty dic
             result["$set"][new_key] = value
             continue
         dict_queue.put((new_key, value))
@@ -123,6 +133,8 @@ def from_dict_to_set(data):

 def get_avalon_project_template(project_name):
     """Get avalon template
+    Args:
+        project_name: (string)
     Returns:
         dictionary with templates
     """
@@ -135,6 +147,16 @@ def get_avalon_project_template(project_name):


 def get_project_apps(in_app_list):
+    """
+    Returns metadata information about apps in 'in_app_list' enhanced
+    from toml files.
+    Args:
+        in_app_list: (list) - names of applications
+
+    Returns:
+        tuple (list, dictionary) - list of dictionaries about apps
+        dictionary of warnings
+    """
     apps = []
     # TODO report
     missing_toml_msg = "Missing config file for application"
@@ -239,6 +261,28 @@ def get_hierarchical_attributes(session, entity, attr_names, attr_defaults={}):
     return hier_values


+def get_task_short_name(task_type):
+    """
+    Returns short name (code) for 'task_type'. Short name stored in
+    metadata dictionary in project.config per each 'task_type'.
+    Could be used in anatomy, paths etc.
+    If no appropriate short name is found in mapping, 'task_type' is
+    returned back unchanged.
+
+    Currently stores data in:
+        'pype-config/presets/ftrack/project_defaults.json'
+    Args:
+        task_type: (string) - Animation | Modeling ...
+
+    Returns:
+        (string) - anim | model ...
+    """
+    presets = config.get_presets()['ftrack']['project_defaults']\
+        .get("task_short_names")
+
+    return presets.get(task_type, task_type)
+
+
 class SyncEntitiesFactory:
     dbcon = AvalonMongoDB()

@@ -378,7 +422,7 @@ class SyncEntitiesFactory:
             "custom_attributes": {},
             "hier_attrs": {},
             "avalon_attrs": {},
-            "tasks": []
+            "tasks": {}
         })

         for entity in all_project_entities:
@@ -389,7 +433,9 @@ class SyncEntitiesFactory:
                 continue

             elif entity_type_low == "task":
-                entities_dict[parent_id]["tasks"].append(entity["name"])
+                # enrich task info with additional metadata
+                task = {"type": entity["type"]["name"]}
+                entities_dict[parent_id]["tasks"][entity["name"]] = task
                 continue

             entity_id = entity["id"]
@@ -416,6 +462,13 @@ class SyncEntitiesFactory:

     @property
     def avalon_ents_by_id(self):
+        """
+        Returns dictionary of avalon tracked entities (assets stored in
+        MongoDB) accessible by its '_id'
+        (mongo intenal ID - example ObjectId("5f48de5830a9467b34b69798"))
+        Returns:
+            (dictionary) - {"(_id)": whole entity asset}
+        """
         if self._avalon_ents_by_id is None:
             self._avalon_ents_by_id = {}
             for entity in self.avalon_entities:
@@ -425,6 +478,14 @@ class SyncEntitiesFactory:

     @property
     def avalon_ents_by_ftrack_id(self):
+        """
+        Returns dictionary of Mongo ids of avalon tracked entities
+        (assets stored in MongoDB) accessible by its 'ftrackId'
+        (id from ftrack)
+        (example '431ee3f2-e91a-11ea-bfa4-92591a5b5e3e')
+        Returns:
+            (dictionary) - {"(ftrackId)": "_id"}
+        """
         if self._avalon_ents_by_ftrack_id is None:
             self._avalon_ents_by_ftrack_id = {}
             for entity in self.avalon_entities:
@@ -437,6 +498,13 @@ class SyncEntitiesFactory:

     @property
     def avalon_ents_by_name(self):
+        """
+        Returns dictionary of Mongo ids of avalon tracked entities
+        (assets stored in MongoDB) accessible by its 'name'
+        (example 'Hero')
+        Returns:
+            (dictionary) - {"(name)": "_id"}
+        """
         if self._avalon_ents_by_name is None:
             self._avalon_ents_by_name = {}
             for entity in self.avalon_entities:
@@ -446,6 +514,15 @@ class SyncEntitiesFactory:

     @property
     def avalon_ents_by_parent_id(self):
+        """
+        Returns dictionary of avalon tracked entities
+        (assets stored in MongoDB) accessible by its 'visualParent'
+        (example ObjectId("5f48de5830a9467b34b69798"))
+
+        Fills 'self._avalon_archived_ents' for performance
+        Returns:
+            (dictionary) - {"(_id)": whole entity}
+        """
         if self._avalon_ents_by_parent_id is None:
             self._avalon_ents_by_parent_id = collections.defaultdict(list)
             for entity in self.avalon_entities:
@@ -458,6 +535,14 @@ class SyncEntitiesFactory:

     @property
     def avalon_archived_ents(self):
+        """
+        Returns list of archived assets from DB
+        (their "type" == 'archived_asset')
+
+        Fills 'self._avalon_archived_ents' for performance
+        Returns:
+            (list) of assets
+        """
         if self._avalon_archived_ents is None:
             self._avalon_archived_ents = [
                 ent for ent in self.dbcon.find({"type": "archived_asset"})
@@ -466,6 +551,14 @@ class SyncEntitiesFactory:

     @property
     def avalon_archived_by_name(self):
+        """
+        Returns list of archived assets from DB
+        (their "type" == 'archived_asset')
+
+        Fills 'self._avalon_archived_by_name' for performance
+        Returns:
+            (dictionary of lists) of assets accessible by asset name
+        """
         if self._avalon_archived_by_name is None:
             self._avalon_archived_by_name = collections.defaultdict(list)
             for ent in self.avalon_archived_ents:
@@ -474,6 +567,14 @@ class SyncEntitiesFactory:

     @property
     def avalon_archived_by_id(self):
+        """
+        Returns dictionary of archived assets from DB
+        (their "type" == 'archived_asset')
+
+        Fills 'self._avalon_archived_by_id' for performance
+        Returns:
+            (dictionary) of assets accessible by asset mongo _id
+        """
         if self._avalon_archived_by_id is None:
             self._avalon_archived_by_id = {
                 str(ent["_id"]): ent for ent in self.avalon_archived_ents
@@ -482,6 +583,15 @@ class SyncEntitiesFactory:

     @property
     def avalon_archived_by_parent_id(self):
+        """
+        Returns dictionary of archived assets from DB per their's parent
+        (their "type" == 'archived_asset')
+
+        Fills 'self._avalon_archived_by_parent_id' for performance
+        Returns:
+            (dictionary of lists) of assets accessible by asset parent
+            mongo _id
+        """
         if self._avalon_archived_by_parent_id is None:
             self._avalon_archived_by_parent_id = collections.defaultdict(list)
             for entity in self.avalon_archived_ents:
@@ -494,6 +604,14 @@ class SyncEntitiesFactory:

     @property
     def subsets_by_parent_id(self):
+        """
+        Returns dictionary of subsets from Mongo ("type": "subset")
+        grouped by their parent.
+
+        Fills 'self._subsets_by_parent_id' for performance
+        Returns:
+            (dictionary of lists)
+        """
         if self._subsets_by_parent_id is None:
             self._subsets_by_parent_id = collections.defaultdict(list)
             for subset in self.dbcon.find({"type": "subset"}):
@@ -515,6 +633,11 @@ class SyncEntitiesFactory:

     @property
     def all_ftrack_names(self):
+        """
+        Returns lists of names of all entities in Ftrack
+        Returns:
+            (list)
+        """
         return [
             ent_dict["name"] for ent_dict in self.entities_dict.values() if (
                 ent_dict.get("name")
@@ -534,8 +657,9 @@ class SyncEntitiesFactory:
             name = entity_dict["name"]
             entity_type = entity_dict["entity_type"]
             # Tasks must be checked too
-            for task_name in entity_dict["tasks"]:
-                passed = task_names.get(task_name)
+            for task in entity_dict["tasks"].items():
+                task_name, task = task
+                passed = task_name
                 if passed is None:
                     passed = check_regex(
                         task_name, "task", schema_patterns=_schema_patterns
@@ -1014,22 +1138,26 @@ class SyncEntitiesFactory:
                 if not msg or not items:
                     continue
                 self.report_items["warning"][msg] = items

+                tasks = {}
+                for tt in task_types:
+                    tasks[tt["name"]] = {
+                        "short_name": get_task_short_name(tt["name"])
+                    }
                 self.entities_dict[id]["final_entity"]["config"] = {
-                    "tasks": [{"name": tt["name"]} for tt in task_types],
+                    "tasks": tasks,
                     "apps": proj_apps
                 }
                 continue

            ent_path_items = [ent["name"] for ent in entity["link"]]
-            parents = ent_path_items[1:len(ent_path_items)-1:]
+            parents = ent_path_items[1:len(ent_path_items) - 1:]
            hierarchy = ""
            if len(parents) > 0:
                hierarchy = os.path.sep.join(parents)

            data["parents"] = parents
            data["hierarchy"] = hierarchy
-            data["tasks"] = self.entities_dict[id].pop("tasks", [])
+            data["tasks"] = self.entities_dict[id].pop("tasks", {})
            self.entities_dict[id]["final_entity"]["data"] = data
            self.entities_dict[id]["final_entity"]["type"] = "asset"

@@ -1141,7 +1269,7 @@ class SyncEntitiesFactory:
             if not is_right and not else_match_better:
                 entity = entity_dict["entity"]
                 ent_path_items = [ent["name"] for ent in entity["link"]]
-                parents = ent_path_items[1:len(ent_path_items)-1:]
+                parents = ent_path_items[1:len(ent_path_items) - 1:]
                 av_parents = av_ent_by_mongo_id["data"]["parents"]
                 if av_parents == parents:
                     is_right = True
@@ -1904,10 +2032,10 @@ class SyncEntitiesFactory:
             filter = {"_id": ObjectId(mongo_id)}
             change_data = from_dict_to_set(changes)
             mongo_changes_bulk.append(UpdateOne(filter, change_data))

         if not mongo_changes_bulk:
             # TODO LOG
             return
+        log.debug("mongo_changes_bulk:: {}".format(mongo_changes_bulk))
         self.dbcon.bulk_write(mongo_changes_bulk)

     def reload_parents(self, hierarchy_changing_ids):
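Note: from_dict_to_set flattens nested Ftrack data into dotted keys for a MongoDB $set document, and the tweak in this file additionally writes empty dicts through as values instead of descending into them. A condensed, runnable restatement of the behavior:

    import queue


    def from_dict_to_set(data):
        # Condensed from the function in the diff.
        result = {"$set": {}}
        dict_queue = queue.Queue()
        dict_queue.put((None, data))

        while not dict_queue.empty():
            _key, _data = dict_queue.get()
            for key, value in _data.items():
                new_key = key if _key is None else "{}.{}".format(_key, key)
                # Empty dicts are now stored as values, not recursed into.
                if not isinstance(value, dict) or not value:
                    result["$set"][new_key] = value
                    continue
                dict_queue.put((new_key, value))
        return result


    print(from_dict_to_set({"data": {"fps": 25, "tasks": {}}}))
    # {'$set': {'data.fps': 25, 'data.tasks': {}}}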
@@ -2,7 +2,7 @@ import functools
 import time
 from pype.api import Logger
 import ftrack_api
-from pype.modules.ftrack.ftrack_server.lib import SocketSession
+from pype.modules.ftrack import ftrack_server


 class MissingPermision(Exception):
@@ -41,7 +41,7 @@ class BaseHandler(object):
         self.log = Logger().get_logger(self.__class__.__name__)
         if not(
             isinstance(session, ftrack_api.session.Session) or
-            isinstance(session, SocketSession)
+            isinstance(session, ftrack_server.lib.SocketSession)
         ):
             raise Exception((
                 "Session object entered with args is instance of \"{}\""
@@ -49,7 +49,7 @@ class BaseHandler(object):
             ).format(
                 str(type(session)),
                 str(ftrack_api.session.Session),
-                str(SocketSession)
+                str(ftrack_server.lib.SocketSession)
             ))

         self._session = session
@@ -1,7 +1,7 @@
 import os
 import requests
 from avalon import style
-from pype.modules.ftrack import credentials
+from pype.modules.ftrack.lib import credentials
 from . import login_tools
 from pype.api import resources
 from Qt import QtCore, QtGui, QtWidgets
@@ -238,6 +238,8 @@ class CredentialsDialog(QtWidgets.QDialog):

         # If there is an existing server thread running we need to stop it.
         if self._login_server_thread:
-            self._login_server_thread.stop()
-            self._login_server_thread.join()
+            if self._login_server_thread.isAlive():
+                self._login_server_thread.stop()
+                self._login_server_thread.join()
+            self._login_server_thread = None

@@ -61,12 +61,17 @@ class LoginServerThread(threading.Thread):
     def __init__(self, url, callback):
         self.url = url
         self.callback = callback
+        self._server = None
         super(LoginServerThread, self).__init__()

     def _handle_login(self, api_user, api_key):
         '''Login to server with *api_user* and *api_key*.'''
         self.callback(api_user, api_key)

+    def stop(self):
+        if self._server:
+            self._server.server_close()
+
     def run(self):
         '''Listen for events.'''
         self._server = HTTPServer(
@@ -1,6 +1,4 @@
-from Qt import QtWidgets
 from pype.api import Logger
-from ..gui.app import LogsWindow


 class LoggingModule:
@@ -8,7 +6,13 @@ class LoggingModule:
         self.parent = parent
         self.log = Logger().get_logger(self.__class__.__name__, "logging")

+        self.window = None
+
+        self.tray_init(main_parent, parent)
+
+    def tray_init(self, main_parent, parent):
         try:
+            from .gui.app import LogsWindow
             self.window = LogsWindow()
             self.tray_menu = self._tray_menu
         except Exception:
@@ -18,12 +22,12 @@ class LoggingModule:

     # Definition of Tray menu
     def _tray_menu(self, parent_menu):
+        from Qt import QtWidgets
         # Menu for Tray App
         menu = QtWidgets.QMenu('Logging', parent_menu)
         # menu.setProperty('submenu', 'on')

         show_action = QtWidgets.QAction("Show Logs", menu)
-        show_action.triggered.connect(self.on_show_logs)
+        show_action.triggered.connect(self._show_logs_gui)
         menu.addAction(show_action)

         parent_menu.addMenu(menu)
@@ -34,5 +38,6 @@ class LoggingModule:
     def process_modules(self, modules):
         return

-    def on_show_logs(self):
-        self.window.show()
+    def _show_logs_gui(self):
+        if self.window:
+            self.window.show()
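Note: LoggingModule is one of several tray modules refactored to the same shape in this commit (AvalonApps, Clockify, Muster, User, TimersManager): Qt and widget imports move from module scope into a tray_init method, so the module imports cleanly in headless runs and GUI classes are only pulled in when a tray actually exists. The shared skeleton, roughly (module and class names invented):

    class SomeTrayModule:
        def __init__(self, main_parent=None, parent=None):
            self.window = None
            self.tray_init(main_parent, parent)

        def tray_init(self, main_parent, parent):
            # GUI-only imports happen here, not at module import time.
            try:
                from .gui.app import SomeWindow  # hypothetical widget module
                self.window = SomeWindow()
            except Exception:
                pass  # headless runs keep self.window as None

        def show(self):
            if self.window:
                self.window.show()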
@@ -1,10 +1,7 @@
-import appdirs
 from avalon import style
-from Qt import QtWidgets
 import os
 import json
-from .widget_login import MusterLogin
-from avalon.vendor import requests
+import appdirs
+import requests


 class MusterModule:
@@ -21,6 +18,11 @@ class MusterModule:
         self.cred_path = os.path.join(
             self.cred_folder_path, self.cred_filename
         )
+        self.tray_init(main_parent, parent)
+
+    def tray_init(self, main_parent, parent):
+        from .widget_login import MusterLogin
+
         self.main_parent = main_parent
         self.parent = parent
         self.widget_login = MusterLogin(main_parent, self)
@@ -38,10 +40,6 @@ class MusterModule:
         pass

     def process_modules(self, modules):
-
-        def api_callback():
-            self.aShowLogin.trigger()
-
         if "RestApiServer" in modules:
             def api_show_login():
                 self.aShowLogin.trigger()
@@ -51,13 +49,12 @@ class MusterModule:

     # Definition of Tray menu
     def tray_menu(self, parent):
-        """
-        Add **change credentials** option to tray menu.
-        """
+        """Add **change credentials** option to tray menu."""
+        from Qt import QtWidgets

         # Menu for Tray App
         self.menu = QtWidgets.QMenu('Muster', parent)
         self.menu.setProperty('submenu', 'on')
         self.menu.setStyleSheet(style.load_stylesheet())

         # Actions
         self.aShowLogin = QtWidgets.QAction(
@@ -91,9 +88,9 @@ class MusterModule:
         if not MUSTER_REST_URL:
             raise AttributeError("Muster REST API url not set")
         params = {
-                'username': username,
-                'password': password
-                }
+            'username': username,
+            'password': password
+        }
         api_entry = '/api/login'
         response = self._requests_post(
             MUSTER_REST_URL + api_entry, params=params)
@@ -2,7 +2,6 @@ import os
 import sys
 import subprocess
 import pype
-from pype import tools


 class StandAlonePublishModule:
@@ -30,6 +29,7 @@ class StandAlonePublishModule:
         ))

     def show(self):
+        from pype import tools
         standalone_publisher_tool_path = os.path.join(
             os.path.dirname(tools.__file__),
             "standalonepublish"
@@ -1,5 +1,4 @@
 from .timers_manager import TimersManager
-from .widget_user_idle import WidgetUserIdle

 CLASS_DEFINIION = TimersManager

@@ -1,21 +1,7 @@
-from .widget_user_idle import WidgetUserIdle, SignalHandler
-from pype.api import Logger, config
+from pype.api import Logger


-class Singleton(type):
-    """ Signleton implementation
-    """
-    _instances = {}
-
-    def __call__(cls, *args, **kwargs):
-        if cls not in cls._instances:
-            cls._instances[cls] = super(
-                Singleton, cls
-            ).__call__(*args, **kwargs)
-        return cls._instances[cls]
-
-
-class TimersManager(metaclass=Singleton):
+class TimersManager:
     """ Handles about Timers.

     Should be able to start/stop all timers at once.
@@ -41,7 +27,13 @@ class TimersManager(metaclass=Singleton):

         self.idle_man = None
         self.signal_handler = None

+        self.trat_init(tray_widget, main_widget)
+
+    def trat_init(self, tray_widget, main_widget):
+        from .widget_user_idle import WidgetUserIdle, SignalHandler
         self.widget_user_idle = WidgetUserIdle(self, tray_widget)
+        self.signal_handler = SignalHandler(self)

     def set_signal_times(self):
         try:
@@ -119,7 +111,6 @@ class TimersManager(metaclass=Singleton):
         """

         if 'IdleManager' in modules:
-            self.signal_handler = SignalHandler(self)
             if self.set_signal_times() is True:
                 self.register_to_idle_manager(modules['IdleManager'])

@@ -3,8 +3,6 @@ import json
 import getpass

 import appdirs
-from Qt import QtWidgets
-from .widget_user import UserWidget

 from pype.api import Logger

@@ -24,6 +22,12 @@ class UserModule:
         self.cred_path = os.path.normpath(os.path.join(
             self.cred_folder_path, self.cred_filename
         ))
+        self.widget_login = None
+
+        self.tray_init(main_parent, parent)
+
+    def tray_init(self, main_parent=None, parent=None):
+        from .widget_user import UserWidget
         self.widget_login = UserWidget(self)

         self.load_credentials()
@@ -66,6 +70,7 @@ class UserModule:

     # Definition of Tray menu
     def tray_menu(self, parent_menu):
+        from Qt import QtWidgets
         """Add menu or action to Tray(or parent)'s menu"""
         action = QtWidgets.QAction("Username", parent_menu)
         action.triggered.connect(self.show_widget)
@@ -121,7 +126,8 @@ class UserModule:

         self.cred = {"username": username}
         os.environ[self.env_name] = username
-        self.widget_login.set_user(username)
+        if self.widget_login:
+            self.widget_login.set_user(username)
         try:
             file = open(self.cred_path, "w")
             file.write(json.dumps(self.cred))
@@ -31,12 +31,13 @@ class WebSocketServer():
         self.client = None
         self.handlers = {}

+        port = None
         websocket_url = os.getenv("WEBSOCKET_URL")
         if websocket_url:
             parsed = urllib.parse.urlparse(websocket_url)
             port = parsed.port
         if not port:
-            port = 8099  # fallback
+            port = 8098  # fallback

         self.app = web.Application()

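Note: port must be pre-seeded with None because the "if not port:" fallback is evaluated even when WEBSOCKET_URL is unset; the commit also moves the fallback port from 8099 to 8098. A self-contained sketch of the corrected flow:

    import os
    import urllib.parse


    def resolve_port(default=8098):
        port = None  # must exist even when WEBSOCKET_URL is unset
        websocket_url = os.getenv("WEBSOCKET_URL")
        if websocket_url:
            parsed = urllib.parse.urlparse(websocket_url)
            port = parsed.port  # None when the URL carries no explicit port
        if not port:
            port = default  # fallback, 8098 after this commit
        return port


    print(resolve_port())  # 8098 unless WEBSOCKET_URL specifies a port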
@@ -2,7 +2,6 @@ import sys
 import six
 import pyblish.api
 from avalon import io
-from pprint import pformat

 try:
     from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_AUTO_SYNC
@@ -46,9 +45,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

         hierarchy_context = self.context.data["hierarchyContext"]

-        self.log.debug(
-            f"__ hierarchy_context: `{pformat(hierarchy_context)}`")
-
         self.session = self.context.data["ftrackSession"]
         project_name = self.context.data["projectEntity"]["name"]
         query = 'Project where full_name is "{}"'.format(project_name)
@ -23,123 +23,256 @@ Provides:
|
|||
|
||||
import copy
|
||||
import json
|
||||
import collections
|
||||
|
||||
from avalon import io
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
|
||||
"""Collect Instance specific Anatomy data."""
|
||||
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
||||
"""Collect Instance specific Anatomy data.
|
||||
|
||||
Plugin is running for all instances on context even not active instances.
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.49
|
||||
label = "Collect Anatomy Instance data"
|
||||
|
||||
def process(self, instance):
|
||||
# get all the stuff from the database
|
||||
anatomy_data = copy.deepcopy(instance.context.data["anatomyData"])
|
||||
project_entity = instance.context.data["projectEntity"]
|
||||
context_asset_entity = instance.context.data["assetEntity"]
|
||||
instance_asset_entity = instance.data.get("assetEntity")
|
||||
def process(self, context):
|
||||
self.log.info("Collecting anatomy data for all instances.")
|
||||
|
||||
asset_name = instance.data["asset"]
|
||||
self.fill_missing_asset_docs(context)
|
||||
self.fill_latest_versions(context)
|
||||
self.fill_anatomy_data(context)
|
||||
|
||||
# There is possibility that assetEntity on instance is already set
|
||||
# which can happen in standalone publisher
|
||||
if (
|
||||
instance_asset_entity
|
||||
and instance_asset_entity["name"] == asset_name
|
||||
):
|
||||
asset_entity = instance_asset_entity
|
||||
self.log.info("Anatomy Data collection finished.")
|
||||
|
||||
# Check if asset name is the same as what is in context
|
||||
# - they may be different, e.g. in NukeStudio
|
||||
elif context_asset_entity["name"] == asset_name:
|
||||
asset_entity = context_asset_entity
|
||||
def fill_missing_asset_docs(self, context):
|
||||
self.log.debug("Qeurying asset documents for instances.")
|
||||
|
||||
else:
|
||||
asset_entity = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project_entity["_id"]
|
||||
})
|
||||
context_asset_doc = context.data["assetEntity"]
|
||||
|
||||
subset_name = instance.data["subset"]
|
||||
version_number = instance.data.get("version")
|
||||
latest_version = None
|
||||
instances_with_missing_asset_doc = collections.defaultdict(list)
|
||||
for instance in context:
|
||||
instance_asset_doc = instance.data.get("assetEntity")
|
||||
_asset_name = instance.data["asset"]
|
||||
|
||||
if asset_entity:
|
||||
subset_entity = io.find_one({
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset_entity["_id"]
|
||||
})
|
||||
# There is possibility that assetEntity on instance is already set
|
||||
# which can happen in standalone publisher
|
||||
if (
|
||||
instance_asset_doc
|
||||
and instance_asset_doc["name"] == _asset_name
|
||||
):
|
||||
continue
|
||||
|
||||
# Check if asset name is the same as what is in context
|
||||
# - they may be different, e.g. in NukeStudio
|
||||
if context_asset_doc["name"] == _asset_name:
|
||||
instance.data["assetEntity"] = context_asset_doc
|
||||
|
||||
if subset_entity is None:
|
||||
self.log.debug("Subset entity does not exist yet.")
|
||||
else:
|
||||
version_entity = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset_entity["_id"]
|
||||
},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
if version_entity:
|
||||
latest_version = version_entity["name"]
|
||||
instances_with_missing_asset_doc[_asset_name].append(instance)
|
||||
|
||||
# If version is not specified for instance or context
|
||||
if version_number is None:
|
||||
# TODO we should be able to change default version by studio
|
||||
# preferences (like start with version number `0`)
|
||||
version_number = 1
|
||||
# use latest version (+1) if already any exist
|
||||
if latest_version is not None:
|
||||
version_number += int(latest_version)
|
||||
if not instances_with_missing_asset_doc:
|
||||
self.log.debug("All instances already had right asset document.")
|
||||
return
|
||||
|
||||
anatomy_updates = {
|
||||
"asset": asset_name,
|
||||
"family": instance.data["family"],
|
||||
"subset": subset_name,
|
||||
"version": version_number
|
||||
asset_names = list(instances_with_missing_asset_doc.keys())
|
||||
self.log.debug("Querying asset documents with names: {}".format(
|
||||
", ".join(["\"{}\"".format(name) for name in asset_names])
|
||||
))
|
||||
asset_docs = io.find({
|
||||
"type": "asset",
|
||||
"name": {"$in": asset_names}
|
||||
})
|
||||
asset_docs_by_name = {
|
||||
asset_doc["name"]: asset_doc
|
||||
for asset_doc in asset_docs
|
||||
}
|
||||
if (
|
||||
asset_entity
|
||||
and asset_entity["_id"] != context_asset_entity["_id"]
|
||||
):
|
||||
parents = asset_entity["data"].get("parents") or list()
|
||||
anatomy_updates["hierarchy"] = "/".join(parents)
|
||||
|
||||
task_name = instance.data.get("task")
|
||||
if task_name:
|
||||
anatomy_updates["task"] = task_name
|
||||
not_found_asset_names = []
|
||||
for asset_name, instances in instances_with_missing_asset_doc.items():
|
||||
asset_doc = asset_docs_by_name.get(asset_name)
|
||||
if not asset_doc:
|
||||
not_found_asset_names.append(asset_name)
|
||||
continue
|
||||
|
||||
# Version should not be collected since may be instance
|
||||
anatomy_data.update(anatomy_updates)
|
||||
for _instance in instances:
|
||||
_instance.data["assetEntity"] = asset_doc
|
||||
|
||||
resolution_width = instance.data.get("resolutionWidth")
|
||||
if resolution_width:
|
||||
anatomy_data["resolution_width"] = resolution_width
|
||||
if not_found_asset_names:
|
||||
joined_asset_names = ", ".join(
|
||||
["\"{}\"".format(name) for name in not_found_asset_names]
|
||||
)
|
||||
self.log.warning((
|
||||
"Not found asset documents with names \"{}\"."
|
||||
).format(joined_asset_names))
|
||||
|
||||
resolution_height = instance.data.get("resolutionHeight")
|
||||
if resolution_height:
|
||||
anatomy_data["resolution_height"] = resolution_height
|
||||
def fill_latest_versions(self, context):
|
||||
"""Try to find latest version for each instance's subset.
|
||||
|
||||
pixel_aspect = instance.data.get("pixelAspect")
|
||||
if pixel_aspect:
|
||||
anatomy_data["pixel_aspect"] = float("{:0.2f}".format(
|
||||
float(pixel_aspect)))
|
||||
Key "latestVersion" is always set to latest version or `None`.
|
||||
|
||||
fps = instance.data.get("fps")
|
||||
if fps:
|
||||
anatomy_data["fps"] = float("{:0.2f}".format(
|
||||
float(fps)))
|
||||
Args:
|
||||
context (pyblish.Context)
|
||||
|
||||
instance.data["projectEntity"] = project_entity
|
||||
instance.data["assetEntity"] = asset_entity
|
||||
instance.data["anatomyData"] = anatomy_data
|
||||
instance.data["latestVersion"] = latest_version
|
||||
# TODO should be version number set here?
|
||||
instance.data["version"] = version_number
|
||||
Returns:
|
||||
None
|
||||
|
||||
self.log.info("Instance anatomy Data collected")
|
||||
self.log.debug(json.dumps(anatomy_data, indent=4))
|
||||
"""
|
||||
self.log.debug("Qeurying latest versions for instances.")
|
||||
|
||||
hierarchy = {}
|
||||
subset_names = set()
|
||||
asset_ids = set()
|
||||
for instance in context:
|
||||
# Make sure `"latestVersion"` key is set
|
||||
latest_version = instance.data.get("latestVersion")
|
||||
instance.data["latestVersion"] = latest_version
|
||||
|
||||
# Skip instances withou "assetEntity"
|
||||
asset_doc = instance.data.get("assetEntity")
|
||||
if not asset_doc:
|
||||
continue
|
||||
|
||||
# Store asset ids and subset names for queries
|
||||
asset_id = asset_doc["_id"]
|
||||
subset_name = instance.data["subset"]
|
||||
asset_ids.add(asset_id)
|
||||
subset_names.add(subset_name)
|
||||
|
||||
# Prepare instance hiearchy for faster filling latest versions
|
||||
if asset_id not in hierarchy:
|
||||
hierarchy[asset_id] = {}
|
||||
if subset_name not in hierarchy[asset_id]:
|
||||
hierarchy[asset_id][subset_name] = []
|
||||
hierarchy[asset_id][subset_name].append(instance)
|
||||
|
||||
subset_docs = list(io.find({
|
||||
"type": "subset",
|
||||
"parent": {"$in": list(asset_ids)},
|
||||
"name": {"$in": list(subset_names)}
|
||||
}))
|
||||
|
||||
subset_ids = [
|
||||
subset_doc["_id"]
|
||||
for subset_doc in subset_docs
|
||||
]
|
||||
|
||||
last_version_by_subset_id = self._query_last_versions(subset_ids)
|
||||
for subset_doc in subset_docs:
|
||||
subset_id = subset_doc["_id"]
|
||||
            last_version = last_version_by_subset_id.get(subset_id)
            if last_version is None:
                continue

            asset_id = subset_doc["parent"]
            subset_name = subset_doc["name"]
            _instances = hierarchy[asset_id][subset_name]
            for _instance in _instances:
                _instance.data["latestVersion"] = last_version

    def _query_last_versions(self, subset_ids):
        """Retrieve all latest versions for entered subset_ids.

        Args:
            subset_ids (list): List of subset ids with type `ObjectId`.

        Returns:
            dict: Key is subset id and value is last version name.
        """
        _pipeline = [
            # Find all versions of those subsets
            {"$match": {
                "type": "version",
                "parent": {"$in": subset_ids}
            }},
            # Sort all versions together
            {"$sort": {"name": 1}},
            # Group them by "parent", but only take the last
            {"$group": {
                "_id": "$parent",
                "_version_id": {"$last": "$_id"},
                "name": {"$last": "$name"}
            }}
        ]

        last_version_by_subset_id = {}
        for doc in io.aggregate(_pipeline):
            subset_id = doc["_id"]
            last_version_by_subset_id[subset_id] = doc["name"]

        return last_version_by_subset_id

    def fill_anatomy_data(self, context):
        self.log.debug("Storing anatomy data to instance data.")

        project_doc = context.data["projectEntity"]
        context_asset_doc = context.data["assetEntity"]

        for instance in context:
            version_number = instance.data.get("version")
            # If version is not specified for instance or context
            if version_number is None:
                # TODO we should be able to change default version by studio
                # preferences (like start with version number `0`)
                version_number = 1
                # Use latest version (+1) if any already exist
                latest_version = instance.data["latestVersion"]
                if latest_version is not None:
                    version_number += int(latest_version)

            anatomy_updates = {
                "asset": instance.data["asset"],
                "family": instance.data["family"],
                "subset": instance.data["subset"],
                "version": version_number
            }

            # Hierarchy
            asset_doc = instance.data.get("assetEntity")
            if asset_doc and asset_doc["_id"] != context_asset_doc["_id"]:
                parents = asset_doc["data"].get("parents") or list()
                anatomy_updates["hierarchy"] = "/".join(parents)

            # Task
            task_name = instance.data.get("task")
            if task_name:
                anatomy_updates["task"] = task_name

            # Additional data
            resolution_width = instance.data.get("resolutionWidth")
            if resolution_width:
                anatomy_updates["resolution_width"] = resolution_width

            resolution_height = instance.data.get("resolutionHeight")
            if resolution_height:
                anatomy_updates["resolution_height"] = resolution_height

            pixel_aspect = instance.data.get("pixelAspect")
            if pixel_aspect:
                anatomy_updates["pixel_aspect"] = float(
                    "{:0.2f}".format(float(pixel_aspect))
                )

            fps = instance.data.get("fps")
            if fps:
                anatomy_updates["fps"] = float("{:0.2f}".format(float(fps)))

            anatomy_data = copy.deepcopy(context.data["anatomyData"])
            anatomy_data.update(anatomy_updates)

            # Store anatomy data
            instance.data["projectEntity"] = project_doc
            instance.data["anatomyData"] = anatomy_data
            instance.data["version"] = version_number

            # Log collected data
            instance_name = instance.data["name"]
            instance_label = instance.data.get("label")
            if instance_label:
                instance_name += "({})".format(instance_label)
            self.log.debug("Anatomy data for instance {}: {}".format(
                instance_name,
                json.dumps(anatomy_data, indent=4)
            ))
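For clarity, here is a minimal pure-Python sketch of what the aggregation above computes: version documents are sorted by "name", grouped by "parent", and "$last" keeps the highest version per subset. The sample documents below are hypothetical, shaped like the ones the "$match" stage selects.

from collections import defaultdict

version_docs = [
    {"_id": "v1", "type": "version", "parent": "subsetA", "name": 1},
    {"_id": "v3", "type": "version", "parent": "subsetA", "name": 3},
    {"_id": "v2", "type": "version", "parent": "subsetB", "name": 2},
]

# Emulate {"$sort": {"name": 1}} followed by the {"$group": ...} stage.
grouped = defaultdict(list)
for doc in sorted(version_docs, key=lambda d: d["name"]):
    grouped[doc["parent"]].append(doc)

last_version_by_subset_id = {
    parent: docs[-1]["name"] for parent, docs in grouped.items()
}
print(last_version_by_subset_id)  # {'subsetA': 3, 'subsetB': 2}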
@ -1,6 +1,6 @@
import pyblish.api
from avalon import io

from copy import deepcopy


class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
    """Create entities in Avalon based on collected data."""
@ -14,14 +14,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        if "hierarchyContext" not in context.data:
            self.log.info("skipping IntegrateHierarchyToAvalon")
            return
        hierarchy_context = deepcopy(context.data["hierarchyContext"])

        if not io.Session:
            io.install()

        active_assets = []
        hierarchy_context = context.data["hierarchyContext"]
        hierarchy_assets = self._get_assets(hierarchy_context)

        # filter only the active publishing instances
        for instance in context:
            if instance.data.get("publish") is False:
@ -32,13 +30,13 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):

            active_assets.append(instance.data["asset"])

        # filter out only assets which are activated as instances
        new_hierarchy_assets = {k: v for k, v in hierarchy_assets.items()
                                if k in active_assets}
        # remove duplicity in list
        self.active_assets = list(set(active_assets))
        self.log.debug("__ self.active_assets: {}".format(self.active_assets))

        # modify the hierarchy context so there are only filtered assets
        self._set_assets(hierarchy_context, new_hierarchy_assets)
        hierarchy_context = self._get_assets(hierarchy_context)

        self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))
        input_data = context.data["hierarchyContext"] = hierarchy_context

        self.project = None
@ -61,7 +59,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
            data["inputs"] = entity_data.get("inputs", [])

            # Tasks.
            tasks = entity_data.get("tasks", [])
            tasks = entity_data.get("tasks", {})
            if tasks is not None or len(tasks) > 0:
                data["tasks"] = tasks
            parents = []
@ -101,11 +99,13 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        if entity:
            # Do not override data, only update
            cur_entity_data = entity.get("data") or {}
            new_tasks = data.pop("tasks", [])
            new_tasks = data.pop("tasks", {})
            if "tasks" in cur_entity_data and new_tasks:
                for task_name in new_tasks:
                    if task_name not in cur_entity_data["tasks"]:
                        cur_entity_data["tasks"].append(task_name)
                for task_name in new_tasks.keys():
                    if task_name \
                            not in cur_entity_data["tasks"].keys():
                        cur_entity_data["tasks"][task_name] = \
                            new_tasks[task_name]
            cur_entity_data.update(data)
            data = cur_entity_data
        else:
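The hunk above migrates tasks from a plain list of names to a dict keyed by task name, merging only entries the entity does not have yet. A minimal sketch of that merge, with hypothetical task data:

# Existing entity data with dict-style tasks (hypothetical values).
cur_entity_data = {"tasks": {"Model": {"short_name": "mdl"}}}

# Newly collected tasks, also dict-style after this change.
new_tasks = {"Model": {"short_name": "mdl"}, "Rig": {"short_name": "rig"}}

# Only add task entries that the entity does not have yet.
for task_name, task_info in new_tasks.items():
    if task_name not in cur_entity_data["tasks"]:
        cur_entity_data["tasks"][task_name] = task_info

print(cur_entity_data["tasks"])  # both "Model" and "Rig" present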
@ -178,35 +178,18 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        Usually the deepest part of the dictionary, which
        has no children
        """
        input_dict_copy = deepcopy(input_dict)

        for key in input_dict.keys():
            self.log.debug("__ key: {}".format(key))
            # check if child key is available
            if input_dict[key].get("childs"):
                # loop deeper
                return self._get_assets(input_dict[key]["childs"])
                input_dict_copy[key]["childs"] = self._get_assets(
                    input_dict[key]["childs"])
            else:
                # give the dictionary with assets
                return input_dict
                # filter out unwanted assets
                if key not in self.active_assets:
                    input_dict_copy.pop(key, None)

    def _set_assets(self, input_dict, new_assets=None):
        """ Modify the hierarchy context dictionary.
        It will replace the asset dictionary with only the filtered one.
        """
        for key in input_dict.keys():
            # check if child key is available
            if input_dict[key].get("childs"):
                # return if this is just for testing purposes and no
                # new_assets property is available
                if not new_assets:
                    return True

                # test for deeper inner children availability
                if self._set_assets(input_dict[key]["childs"]):
                    # if children are still available one level deeper,
                    # then process further
                    self._set_assets(input_dict[key]["childs"], new_assets)
                else:
                    # or just assign the filtered asset dictionary
                    input_dict[key]["childs"] = new_assets
            else:
                # test did not find more childs in input dictionary
                return None
        return input_dict_copy
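As a quick illustration of the rewritten _get_assets: it walks a nested "childs" mapping, keeps only leaves whose key is in the active set, and returns a filtered deep copy. A minimal standalone sketch with hypothetical data:

from copy import deepcopy

def filter_assets(input_dict, active_assets):
    # Work on a deep copy so the original hierarchy stays untouched.
    result = deepcopy(input_dict)
    for key in input_dict:
        if input_dict[key].get("childs"):
            # Recurse into nested children.
            result[key]["childs"] = filter_assets(
                input_dict[key]["childs"], active_assets)
        elif key not in active_assets:
            # A leaf that is not an active asset gets dropped.
            result.pop(key, None)
    return result

hierarchy = {"ep01": {"childs": {"sh010": {}, "sh020": {}}}}
print(filter_assets(hierarchy, active_assets={"sh010"}))
# {'ep01': {'childs': {'sh010': {}}}}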
@ -633,6 +633,26 @@ class ExtractReview(pyblish.api.InstancePlugin):
        input_width = int(input_data["width"])
        input_height = int(input_data["height"])

        # Make sure input width and height are not odd numbers
        input_width_is_odd = bool(input_width % 2 != 0)
        input_height_is_odd = bool(input_height % 2 != 0)
        if input_width_is_odd or input_height_is_odd:
            # Add padding to input and make sure this filter comes first
            filters.append("pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2")

            # Update input width or height, as the first filter will change them
            if input_width_is_odd:
                self.log.info((
                    "Converting input width from odd to even number. {} -> {}"
                ).format(input_width, input_width + 1))
                input_width += 1

            if input_height_is_odd:
                self.log.info((
                    "Converting input height from odd to even number. {} -> {}"
                ).format(input_height, input_height + 1))
                input_height += 1

        self.log.debug("pixel_aspect: `{}`".format(pixel_aspect))
        self.log.debug("input_width: `{}`".format(input_width))
        self.log.debug("input_height: `{}`".format(input_height))
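The pad filter above rounds both dimensions up to the nearest even number, which codecs such as libx264 with yuv420p chroma subsampling require. A minimal sketch of an equivalent standalone ffmpeg call (file paths are hypothetical):

import subprocess

# Hypothetical paths; the pad expression matches the one appended above.
cmd = [
    "ffmpeg", "-i", "input_1919x1079.mov",
    "-vf", "pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2",
    "-y", "output_1920x1080.mov",
]
subprocess.run(cmd, check=True)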
@ -654,6 +674,22 @@ class ExtractReview(pyblish.api.InstancePlugin):
        output_width = int(output_width)
        output_height = int(output_height)

        # Make sure output width and height are not odd numbers
        # When this can happen:
        # - if the output definition has width and height set to an odd number
        # - `instance.data` contains width and height with an odd number
        if output_width % 2 != 0:
            self.log.warning((
                "Converting output width from odd to even number. {} -> {}"
            ).format(output_width, output_width + 1))
            output_width += 1

        if output_height % 2 != 0:
            self.log.warning((
                "Converting output height from odd to even number. {} -> {}"
            ).format(output_height, output_height + 1))
            output_height += 1

        self.log.debug(
            "Output resolution is {}x{}".format(output_width, output_height)
        )
@ -3,6 +3,8 @@ from avalon import api, io
import json
import pype.hosts.maya.lib
from collections import defaultdict
from pype.widgets.message_window import ScrollMessageBox
from Qt import QtWidgets


class LookLoader(pype.hosts.maya.plugin.ReferenceLoader):
@ -44,18 +46,33 @@ class LookLoader(pype.hosts.maya.plugin.ReferenceLoader):
        self.update(container, representation)

    def update(self, container, representation):
        """
        Called by Scene Inventory when look should be updated to current
        version.
        If any reference edits cannot be applied, e.g. shader renamed and
        material not present, reference is unloaded and cleaned.
        All failed edits are highlighted to the user via message box.

        Args:
            container: object that has look to be updated
            representation: (dict): relationship data to get proper
                representation from DB and persisted
                data in .json
        Returns:
            None
        """
        import os
        from maya import cmds

        node = container["objectName"]

        path = api.get_representation_path(representation)

        # Get reference node from container members
        members = cmds.sets(node, query=True, nodesOnly=True)
        reference_node = self._get_reference_node(members)

        shader_nodes = cmds.ls(members, type='shadingEngine')
        orig_nodes = set(self._get_nodes_with_shader(shader_nodes))

        file_type = {
            "ma": "mayaAscii",
            "mb": "mayaBinary",
@ -66,6 +83,104 @@ class LookLoader(pype.hosts.maya.plugin.ReferenceLoader):

        assert os.path.exists(path), "%s does not exist." % path

        self._load_reference(file_type, node, path, reference_node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)
        invalid = [x for x in members if ".placeHolderList" in x]
        if invalid:
            cmds.sets(invalid, remove=node)

        # get newly applied shaders and nodes from the new version
        shader_nodes = cmds.ls(members, type='shadingEngine')
        nodes = set(self._get_nodes_with_shader(shader_nodes))

        json_representation = io.find_one({
            "type": "representation",
            "parent": representation['parent'],
            "name": "json"
        })

        # Load relationships
        shader_relation = api.get_representation_path(json_representation)
        with open(shader_relation, "r") as f:
            relationships = json.load(f)

        # updating the reference could result in failed edits - material is
        # not present because of renaming etc.
        failed_edits = cmds.referenceQuery(reference_node,
                                           editStrings=True,
                                           failedEdits=True,
                                           successfulEdits=False)

        # highlight failed edits to the user
        if failed_edits:
            # clean references - removes failed reference edits
            cmds.file(cr=reference_node)  # cleanReference

            # reapply shading groups from json representation on orig nodes
            pype.hosts.maya.lib.apply_shaders(relationships,
                                              shader_nodes,
                                              orig_nodes)

            msg = ["During reference update some edits failed.",
                   "All successful edits were kept intact.\n",
                   "Failed and removed edits:"]
            msg.extend(failed_edits)
            msg = ScrollMessageBox(QtWidgets.QMessageBox.Warning,
                                   "Some reference edits failed",
                                   msg)
            msg.exec_()

        attributes = relationships.get("attributes", [])

        # region compute lookup
        nodes_by_id = defaultdict(list)
        for n in nodes:
            nodes_by_id[pype.hosts.maya.lib.get_id(n)].append(n)
        pype.hosts.maya.lib.apply_attributes(attributes, nodes_by_id)

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def _get_nodes_with_shader(self, shader_nodes):
        """
        Returns list of nodes belonging to specific shaders
        Args:
            shader_nodes: <list> of shading groups
        Returns:
            <list> node names
        """
        import maya.cmds as cmds
        # Get container members

        nodes_list = []
        for shader in shader_nodes:
            connections = cmds.listConnections(cmds.listHistory(shader, f=1),
                                               type='mesh')
            if connections:
                for connection in connections:
                    nodes_list.extend(cmds.listRelatives(connection,
                                                         shapes=True))
        return nodes_list

    def _load_reference(self, file_type, node, path, reference_node):
        """
        Load reference from 'path' on 'reference_node'. Used when a change
        of look (version/update) is triggered.
        Args:
            file_type: extension of referenced file
            node:
            path: (string) location of referenced file
            reference_node: (string) - name of node that the reference
                should be applied on
        Returns:
            None
        """
        import maya.cmds as cmds
        try:
            content = cmds.file(path,
                                loadReference=reference_node,
@ -86,57 +201,10 @@ class LookLoader(pype.hosts.maya.plugin.ReferenceLoader):
                raise

            self.log.warning("Ignoring file read error:\n%s", exc)

        # Fix PLN-40 for older containers created with Avalon that had the
        # `.verticesOnlySet` set to True.
        if cmds.getAttr("{}.verticesOnlySet".format(node)):
            self.log.info("Setting %s.verticesOnlySet to False", node)
            cmds.setAttr("{}.verticesOnlySet".format(node), False)

        # Add new nodes of the reference to the container
        cmds.sets(content, forceElement=node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)
        invalid = [x for x in members if ".placeHolderList" in x]
        if invalid:
            cmds.sets(invalid, remove=node)

        # Get container members
        shader_nodes = cmds.ls(members, type='shadingEngine')

        nodes_list = []
        for shader in shader_nodes:
            connections = cmds.listConnections(cmds.listHistory(shader, f=1),
                                               type='mesh')
            if connections:
                for connection in connections:
                    nodes_list.extend(cmds.listRelatives(connection,
                                                         shapes=True))
        nodes = set(nodes_list)

        json_representation = io.find_one({
            "type": "representation",
            "parent": representation['parent'],
            "name": "json"
        })

        # Load relationships
        shader_relation = api.get_representation_path(json_representation)
        with open(shader_relation, "r") as f:
            relationships = json.load(f)

        attributes = relationships.get("attributes", [])

        # region compute lookup
        nodes_by_id = defaultdict(list)
        for n in nodes:
            nodes_by_id[pype.hosts.maya.lib.get_id(n)].append(n)

        pype.hosts.maya.lib.apply_attributes(attributes, nodes_by_id)

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")
@ -30,7 +30,6 @@ class CollectYetiCache(pyblish.api.InstancePlugin):
    label = "Collect Yeti Cache"
    families = ["yetiRig", "yeticache"]
    hosts = ["maya"]
    tasks = ["animation", "fx"]

    def process(self, instance):
@ -101,7 +101,7 @@ class ExtractCameraMayaScene(pype.api.Extractor):
                self.log.info(
                    "Using {} as scene type".format(self.scene_type))
                break
            except AttributeError:
            except KeyError:
                # no preset found
                pass
@ -33,7 +33,7 @@ class ExtractMayaSceneRaw(pype.api.Extractor):
                self.log.info(
                    "Using {} as scene type".format(self.scene_type))
                break
            except AttributeError:
            except KeyError:
                # no preset found
                pass
        # Define extract output file path
@ -41,7 +41,7 @@ class ExtractModel(pype.api.Extractor):
                self.log.info(
                    "Using {} as scene type".format(self.scene_type))
                break
            except AttributeError:
            except KeyError:
                # no preset found
                pass
        # Define extract output file path
@ -111,7 +111,7 @@ class ExtractYetiRig(pype.api.Extractor):
                self.log.info(
                    "Using {} as scene type".format(self.scene_type))
                break
            except AttributeError:
            except KeyError:
                # no preset found
                pass
        yeti_nodes = cmds.ls(instance, type="pgYetiMaya")
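The four hunks above all make the same fix: the preset lookup is a dict access, so a missing preset raises KeyError, not AttributeError. A minimal sketch of the pattern, with hypothetical preset data:

presets = {"maya": {"ExtractModel": {"scene_type": "ma"}}}  # hypothetical

scene_type = "mb"  # default
try:
    # Dict subscripting raises KeyError when the preset is absent.
    scene_type = presets["maya"]["ExtractModel"]["scene_type"]
except KeyError:
    # no preset found
    pass
print(scene_type)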
@ -262,6 +262,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):

    use_published = True
    tile_assembler_plugin = "PypeTileAssembler"
    asset_dependencies = False

    def process(self, instance):
        """Plugin entry point."""
@ -417,9 +418,10 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
        # Adding file dependencies.
        dependencies = instance.context.data["fileDependencies"]
        dependencies.append(filepath)
        for dependency in dependencies:
            key = "AssetDependency" + str(dependencies.index(dependency))
            payload_skeleton["JobInfo"][key] = dependency
        if self.assembly_files:
            for dependency in dependencies:
                key = "AssetDependency" + str(dependencies.index(dependency))
                payload_skeleton["JobInfo"][key] = dependency

        # Handle environments -----------------------------------------------
        # We need those to pass them to pype for it to set correct context
@ -731,10 +733,14 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
    def _get_maya_payload(self, data):
        payload = copy.deepcopy(payload_skeleton)

        job_info_ext = {
            # Asset dependency to wait for at least the scene file to sync.
            "AssetDependency0": data["filepath"],
        }
        if not self.asset_dependencies:
            job_info_ext = {}

        else:
            job_info_ext = {
                # Asset dependency to wait for at least the scene file to sync.
                "AssetDependency0": data["filepath"],
            }

        plugin_info = {
            "SceneFile": data["filepath"],
@ -40,10 +40,10 @@ class CollectShots(api.InstancePlugin):
        data["name"] = data["subset"] + "_" + data["asset"]

        data["label"] = (
            "{} - {} - tasks: {} - assetbuilds: {} - comments: {}".format(
            "{} - {} - tasks:{} - assetbuilds:{} - comments:{}".format(
                data["asset"],
                data["subset"],
                data["tasks"],
                data["tasks"].keys(),
                [x["name"] for x in data.get("assetbuilds", [])],
                len(data.get("comments", []))
            )
@ -76,7 +76,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):

        # check if audio stream is in input video file
        ffprob_cmd = (
            "{ffprobe_path} -i {full_input_path} -show_streams "
            "{ffprobe_path} -i \"{full_input_path}\" -show_streams "
            "-select_streams a -loglevel error"
        ).format(**locals())
        self.log.debug("ffprob_cmd: {}".format(ffprob_cmd))
@ -106,7 +106,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
        # try to get video native resolution data
        try:
            resolution_output = pype.api.subprocess((
                "{ffprobe_path} -i {full_input_path} -v error "
                "{ffprobe_path} -i \"{full_input_path}\" -v error "
                "-select_streams v:0 -show_entries "
                "stream=width,height -of csv=s=x:p=0"
            ).format(**locals()))
@ -193,7 +193,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
        # append ffmpeg input video clip
        input_args.append("-ss {:0.2f}".format(start_sec))
        input_args.append("-t {:0.2f}".format(duration_sec))
        input_args.append("-i {}".format(full_input_path))
        input_args.append("-i \"{}\"".format(full_input_path))

        # add copy audio video codec if only shortening clip
        if ("_cut-bigger" in tags) and (not empty_add):
@ -203,8 +203,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
            output_args.append("-intra")

        # output filename
        output_args.append("-y")
        output_args.append(full_output_path)
        output_args.append("-y \"{}\"".format(full_output_path))

        mov_args = [
            ffmpeg_path,
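These hunks quote file paths because the commands are joined into a single shell string, where an unquoted path with spaces would split into several arguments. A safer pattern, sketched here under the assumption that the command can run without a shell, is to pass an argument list to subprocess so no manual quoting is needed:

import subprocess

full_input_path = "/projects/my show/shot 010.mov"  # hypothetical path with spaces

# A list of arguments avoids shell word-splitting entirely.
result = subprocess.run(
    ["ffprobe", "-i", full_input_path,
     "-show_streams", "-select_streams", "a", "-loglevel", "error"],
    capture_output=True, text=True, check=True,
)
print(result.stdout)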
@ -17,13 +17,13 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
    subsets = {
        "referenceMain": {
            "family": "review",
            "families": ["review", "ftrack"],
            "families": ["clip", "ftrack"],
            # "ftrackFamily": "review",
            "extension": ".mp4"
        },
        "audioMain": {
            "family": "audio",
            "families": ["ftrack"],
            "families": ["clip", "ftrack"],
            # "ftrackFamily": "audio",
            "extension": ".wav",
            # "version": 1
@ -0,0 +1,29 @@
"""
Requires:
    Nothing

Provides:
    Instance
"""

import pyblish.api
from pprint import pformat


class CollectInstanceData(pyblish.api.InstancePlugin):
    """
    Collector with only one reason for its existence - remove 'ftrack'
    family implicitly added by Standalone Publisher
    """

    label = "Collect instance data"
    order = pyblish.api.CollectorOrder + 0.49
    families = ["render", "plate"]
    hosts = ["standalonepublisher"]

    def process(self, instance):
        fps = instance.data["assetEntity"]["data"]["fps"]
        instance.data.update({
            "fps": fps
        })
        self.log.debug(f"instance.data: {pformat(instance.data)}")
@ -10,7 +10,7 @@ class ExtractShotData(pype.api.Extractor):

    label = "Extract Shot Data"
    hosts = ["standalonepublisher"]
    families = ["review", "audio"]
    families = ["clip"]

    # presets
@ -64,6 +64,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
        else:
            # Convert to jpeg if not yet
            full_input_path = os.path.join(thumbnail_repre["stagingDir"], file)
            full_input_path = '"{}"'.format(full_input_path)
            self.log.info("input {}".format(full_input_path))

            full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1]
@ -1,5 +1,3 @@
import os

import pyblish.api
import pype.api
@ -9,10 +7,14 @@ class ValidateEditorialResources(pyblish.api.InstancePlugin):

    label = "Validate Editorial Resources"
    hosts = ["standalonepublisher"]
    families = ["audio", "review"]
    families = ["clip"]

    order = pype.api.ValidateContentsOrder

    def process(self, instance):
        self.log.debug(
            f"Instance: {instance}, Families: "
            f"{[instance.data['family']] + instance.data['families']}")
        check_file = instance.data["editorialVideoPath"]
        msg = f"Missing \"{check_file}\"."
        assert check_file, msg
BIN pype/resources/app_icons/hiero.png (new file, binary not shown, 45 KiB)
@ -15,7 +15,7 @@ ffprobe_path = pype.lib.get_ffmpeg_tool_path("ffprobe")


FFMPEG = (
    '{} -loglevel panic -i %(input)s %(filters)s %(args)s%(output)s'
    '{} -loglevel panic -i "%(input)s" %(filters)s %(args)s%(output)s'
).format(ffmpeg_path)

FFPROBE = (
@ -268,9 +268,10 @@ class DropDataFrame(QtWidgets.QFrame):
        args = [
            ffprobe_path,
            '-v', 'quiet',
            '-print_format', 'json',
            '-print_format json',
            '-show_format',
            '-show_streams', filepath
            '-show_streams',
            '"{}"'.format(filepath)
        ]
        ffprobe_p = subprocess.Popen(
            ' '.join(args),
@ -1 +1 @@
__version__ = "2.12.1"
__version__ = "2.12.2"
@ -1,4 +1,4 @@
from Qt import QtWidgets
from Qt import QtWidgets, QtCore
import sys
import logging
@ -49,6 +49,17 @@ class Window(QtWidgets.QWidget):


def message(title=None, message=None, level="info", parent=None):
    """
    Produces a centered dialog with a specific level denoting severity
    Args:
        title: (string) dialog title
        message: (string) message
        level: (string) info|warning|critical
        parent: (QtWidgets.QApplication)

    Returns:
        None
    """
    app = parent
    if not app:
        app = QtWidgets.QApplication(sys.argv)
@ -68,3 +79,60 @@ def message(title=None, message=None, level="info", parent=None):
        # skip all possible issues that may happen; the feature is not crucial
        log.warning("Couldn't center message.", exc_info=True)
    # sys.exit(app.exec_())


class ScrollMessageBox(QtWidgets.QDialog):
    """
    Basic version of scrollable QMessageBox. No other existing dialog
    implementation is scrollable.
    Args:
        icon: <QtWidgets.QMessageBox.Icon>
        title: <string>
        messages: <list> of messages
        cancelable: <boolean> - True if Cancel button should be added
    """
    def __init__(self, icon, title, messages, cancelable=False):
        super(ScrollMessageBox, self).__init__()
        self.setWindowTitle(title)
        self.icon = icon

        self.setWindowFlags(QtCore.Qt.WindowTitleHint)

        layout = QtWidgets.QVBoxLayout(self)

        scroll_widget = QtWidgets.QScrollArea(self)
        scroll_widget.setWidgetResizable(True)
        content_widget = QtWidgets.QWidget(self)
        scroll_widget.setWidget(content_widget)

        max_len = 0
        content_layout = QtWidgets.QVBoxLayout(content_widget)
        for message in messages:
            label_widget = QtWidgets.QLabel(message, content_widget)
            content_layout.addWidget(label_widget)
            max_len = max(max_len, len(message))

        # guess size of scrollable area
        max_width = QtWidgets.QApplication.desktop().availableGeometry().width
        scroll_widget.setMinimumWidth(min(max_width, max_len * 6))
        layout.addWidget(scroll_widget)

        if not cancelable:  # if no specific buttons, OK only
            buttons = QtWidgets.QDialogButtonBox.Ok
        else:
            buttons = QtWidgets.QDialogButtonBox.Ok | \
                QtWidgets.QDialogButtonBox.Cancel

        btn_box = QtWidgets.QDialogButtonBox(buttons)
        btn_box.accepted.connect(self.accept)

        if cancelable:
            btn_box.rejected.connect(self.reject)

        btn = QtWidgets.QPushButton('Copy to clipboard')
        btn.clicked.connect(lambda: QtWidgets.QApplication.
                            clipboard().setText("\n".join(messages)))
        btn_box.addButton(btn, QtWidgets.QDialogButtonBox.NoRole)

        layout.addWidget(btn_box)
        self.show()
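Usage mirrors the call in LookLoader.update above; a minimal sketch, assuming a QApplication already exists (the message strings are hypothetical):

from Qt import QtWidgets

messages = ["During reference update some edits failed.",
            "Failed and removed edits:",
            "setAttr shader1.color ..."]  # hypothetical edit strings
box = ScrollMessageBox(QtWidgets.QMessageBox.Warning,
                       "Some reference edits failed",
                       messages)
box.exec_()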
schema/config-1.1.json (new file, 87 lines)
@ -0,0 +1,87 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",

    "title": "pype:config-1.1",
    "description": "A project configuration.",

    "type": "object",

    "additionalProperties": false,
    "required": [
        "tasks",
        "apps"
    ],

    "properties": {
        "schema": {
            "description": "Schema identifier for payload",
            "type": "string"
        },
        "template": {
            "type": "object",
            "additionalProperties": false,
            "patternProperties": {
                "^.*$": {
                    "type": "string"
                }
            }
        },
        "tasks": {
            "type": "object",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "icon": {"type": "string"},
                    "group": {"type": "string"},
                    "label": {"type": "string"}
                },
                "required": [
                    "short_name"
                ]
            }
        },
        "apps": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "icon": {"type": "string"},
                    "group": {"type": "string"},
                    "label": {"type": "string"}
                },
                "required": ["name"]
            }
        },
        "families": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "icon": {"type": "string"},
                    "label": {"type": "string"},
                    "hideFilter": {"type": "boolean"}
                },
                "required": ["name"]
            }
        },
        "groups": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "icon": {"type": "string"},
                    "color": {"type": "string"},
                    "order": {"type": ["integer", "number"]}
                },
                "required": ["name"]
            }
        },
        "copy": {
            "type": "object"
        }
    }
}
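A quick way to sanity-check a project config against this schema, sketched with the third-party jsonschema package and a hypothetical minimal document ("tasks" and "apps" are the required keys):

import json
import jsonschema

with open("schema/config-1.1.json") as f:
    schema = json.load(f)

# Hypothetical minimal config document.
config = {
    "tasks": {"Model": {"short_name": "mdl"}},
    "apps": [{"name": "maya2016", "label": "Autodesk Maya 2016"}],
}
jsonschema.validate(config, schema)  # raises ValidationError on mismatch
print("config is valid")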
schema/inventory-1.1.json (new file, 10 lines)
@ -0,0 +1,10 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",

    "title": "pype:config-1.1",
    "description": "A project configuration.",

    "type": "object",

    "additionalProperties": true
}
schema/project-2.1.json (new file, 86 lines)
@ -0,0 +1,86 @@
{
    "$schema": "http://json-schema.org/draft-04/schema#",

    "title": "pype:project-2.1",
    "description": "A unit of data",

    "type": "object",

    "additionalProperties": true,

    "required": [
        "schema",
        "type",
        "name",
        "data",
        "config"
    ],

    "properties": {
        "schema": {
            "description": "Schema identifier for payload",
            "type": "string",
            "enum": ["avalon-core:project-2.1", "pype:project-2.1"],
            "example": "avalon-core:project-2.1"
        },
        "type": {
            "description": "The type of document",
            "type": "string",
            "enum": ["project"],
            "example": "project"
        },
        "parent": {
            "description": "Unique identifier to parent document",
            "example": "592c33475f8c1b064c4d1696"
        },
        "name": {
            "description": "Name of directory",
            "type": "string",
            "pattern": "^[a-zA-Z0-9_.]*$",
            "example": "hulk"
        },
        "data": {
            "description": "Document metadata",
            "type": "object",
            "example": {
                "fps": 24,
                "width": 1920,
                "height": 1080
            }
        },
        "config": {
            "type": "object",
            "description": "Document metadata",
            "example": {
                "schema": "pype:config-1.1",
                "apps": [
                    {
                        "name": "maya2016",
                        "label": "Autodesk Maya 2016"
                    },
                    {
                        "name": "nuke10",
                        "label": "The Foundry Nuke 10.0"
                    }
                ],
                "tasks": {
                    "Model": {"short_name": "mdl"},
                    "Render": {"short_name": "rnd"},
                    "Animate": {"short_name": "anim"},
                    "Rig": {"short_name": "rig"},
                    "Lookdev": {"short_name": "look"},
                    "Layout": {"short_name": "lay"}
                },
                "template": {
                    "work":
                        "{root}/{project}/{silo}/{asset}/work/{task}/{app}",
                    "publish":
                        "{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/{subset}.{representation}"
                }
            },
            "$ref": "config-1.1.json"
        }
    },

    "definitions": {}
}