".format(
- str(traceback.format_exc()).replace(
- "\n", " ").replace(
- " ", " "
- )
- )
- })
-
- report = {"items": []}
- try:
- report = self.entities_factory.report()
- except Exception:
- pass
-
- _items = report.get("items", [])
- if _items:
- items.append(self.entities_factory.report_splitter)
- items.extend(_items)
-
- self.show_interface(items, title, event)
-
- return {"success": True, "message": msg}
-
finally:
try:
self.entities_factory.dbcon.uninstall()
diff --git a/openpype/modules/default_modules/ftrack/ftrack_module.py b/openpype/modules/default_modules/ftrack/ftrack_module.py
index 1de152535c..5a1fdbc276 100644
--- a/openpype/modules/default_modules/ftrack/ftrack_module.py
+++ b/openpype/modules/default_modules/ftrack/ftrack_module.py
@@ -1,16 +1,13 @@
import os
import json
import collections
-import openpype
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITrayModule,
IPluginPaths,
- ITimersManager,
ILaunchHookPaths,
- ISettingsChangeListener,
- IFtrackEventHandlerPaths
+ ISettingsChangeListener
)
from openpype.settings import SaveWarningExc
@@ -21,7 +18,6 @@ class FtrackModule(
OpenPypeModule,
ITrayModule,
IPluginPaths,
- ITimersManager,
ILaunchHookPaths,
ISettingsChangeListener
):
@@ -61,6 +57,10 @@ class FtrackModule(
self.user_event_handlers_paths = user_event_handlers_paths
self.tray_module = None
+ # TimersManager connection
+ self.timers_manager_connector = None
+ self._timers_manager_module = None
+
def get_global_environments(self):
"""Ftrack's global environments."""
return {
@@ -79,9 +79,17 @@ class FtrackModule(
def connect_with_modules(self, enabled_modules):
for module in enabled_modules:
- if not isinstance(module, IFtrackEventHandlerPaths):
+ if not hasattr(module, "get_ftrack_event_handler_paths"):
continue
- paths_by_type = module.get_event_handler_paths() or {}
+
+ try:
+ paths_by_type = module.get_ftrack_event_handler_paths()
+ except Exception:
+ continue
+
+ if not isinstance(paths_by_type, dict):
+ continue
+
for key, value in paths_by_type.items():
if not value:
continue
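
A note on the change above: with the IFtrackEventHandlerPaths interface removed, any module can contribute ftrack event handlers simply by exposing a get_ftrack_event_handler_paths method returning a dict with "server" and/or "user" keys (the keys the loop below dispatches on). A minimal sketch of such a module; the class name and folder names are illustrative only:

    import os

    from openpype.modules import OpenPypeModule


    class ExampleFtrackHandlersModule(OpenPypeModule):
        """Illustrative module contributing handlers to the ftrack module."""

        name = "example_ftrack_handlers"

        def initialize(self, module_settings):
            self.enabled = True

        def get_ftrack_event_handler_paths(self):
            current_dir = os.path.dirname(os.path.abspath(__file__))
            return {
                "server": [os.path.join(current_dir, "event_handlers_server")],
                "user": [os.path.join(current_dir, "event_handlers_user")]
            }
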
@@ -102,16 +110,6 @@ class FtrackModule(
elif key == "user":
self.user_event_handlers_paths.extend(value)
- def start_timer(self, data):
- """Implementation of ITimersManager interface."""
- if self.tray_module:
- self.tray_module.start_timer_manager(data)
-
- def stop_timer(self):
- """Implementation of ITimersManager interface."""
- if self.tray_module:
- self.tray_module.stop_timer_manager()
-
def on_system_settings_save(
self, old_value, new_value, changes, new_value_metadata
):
@@ -231,7 +229,13 @@ class FtrackModule(
return
import ftrack_api
- from openpype_modules.ftrack.lib import get_openpype_attr
+ from openpype_modules.ftrack.lib import (
+ get_openpype_attr,
+ default_custom_attributes_definition,
+ CUST_ATTR_TOOLS,
+ CUST_ATTR_APPLICATIONS,
+ CUST_ATTR_INTENT
+ )
try:
session = self.create_ftrack_session()
@@ -256,6 +260,15 @@ class FtrackModule(
project_id = project_entity["id"]
+ ca_defs = default_custom_attributes_definition()
+ hierarchical_attrs = ca_defs.get("is_hierarchical") or {}
+ project_attrs = ca_defs.get("show") or {}
+ ca_keys = (
+ set(hierarchical_attrs.keys())
+ | set(project_attrs.keys())
+ | {CUST_ATTR_TOOLS, CUST_ATTR_APPLICATIONS, CUST_ATTR_INTENT}
+ )
+
cust_attr, hier_attr = get_openpype_attr(session)
cust_attr_by_key = {attr["key"]: attr for attr in cust_attr}
hier_attrs_by_key = {attr["key"]: attr for attr in hier_attr}
@@ -263,6 +276,9 @@ class FtrackModule(
failed = {}
missing = {}
for key, value in attributes_changes.items():
+ if key not in ca_keys:
+ continue
+
configuration = hier_attrs_by_key.get(key)
if not configuration:
configuration = cust_attr_by_key.get(key)
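
For orientation, default_custom_attributes_definition() in the block above loads the default custom attribute definitions bundled with the ftrack module; its "show" and "is_hierarchical" sections are keyed by attribute name, so the union of those keys plus the tools/applications/intent constants gives the set of keys OpenPype may push back to ftrack. A rough sketch of the derived filter, with a hypothetical definitions shape:

    # Hypothetical shape - the real definitions ship with the ftrack module.
    ca_defs = {
        "show": {"avalon_auto_sync": {"type": "boolean"}},
        "is_hierarchical": {"fps": {"type": "number"}}
    }
    ca_keys = (
        set(ca_defs["is_hierarchical"])
        | set(ca_defs["show"])
        | {"tools_env", "applications", "intent"}
    )
    # Any key in attributes_changes that is not in ca_keys is skipped.
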
@@ -343,7 +359,10 @@ class FtrackModule(
def tray_init(self):
from .tray import FtrackTrayWrapper
+
self.tray_module = FtrackTrayWrapper(self)
+        # Module is its own connector to TimersManager
+ self.timers_manager_connector = self
def tray_menu(self, parent_menu):
return self.tray_module.tray_menu(parent_menu)
@@ -357,3 +376,36 @@ class FtrackModule(
def set_credentials_to_env(self, username, api_key):
os.environ["FTRACK_API_USER"] = username or ""
os.environ["FTRACK_API_KEY"] = api_key or ""
+
+ # --- TimersManager connection methods ---
+ def start_timer(self, data):
+ if self.tray_module:
+ self.tray_module.start_timer_manager(data)
+
+ def stop_timer(self):
+ if self.tray_module:
+ self.tray_module.stop_timer_manager()
+
+ def register_timers_manager(self, timer_manager_module):
+ self._timers_manager_module = timer_manager_module
+
+ def timer_started(self, data):
+ if self._timers_manager_module is not None:
+ self._timers_manager_module.timer_started(self.id, data)
+
+ def timer_stopped(self):
+ if self._timers_manager_module is not None:
+ self._timers_manager_module.timer_stopped(self.id)
+
+ def get_task_time(self, project_name, asset_name, task_name):
+ session = self.create_ftrack_session()
+ query = (
+ 'Task where name is "{}"'
+ ' and parent.name is "{}"'
+ ' and project.full_name is "{}"'
+ ).format(task_name, asset_name, project_name)
+ task_entity = session.query(query).first()
+ if not task_entity:
+ return 0
+ hours_logged = (task_entity["time_logged"] / 60) / 60
+ return hours_logged
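
The methods above replace the removed ITimersManager interface with a loose connector convention: the ftrack module exposes itself as timers_manager_connector, the TimersManager side calls register_timers_manager on it, and afterwards both sides talk through start_timer/stop_timer and timer_started/timer_stopped. A hypothetical sketch of the consuming side, assuming only the attribute and method names visible in this diff:

    class TimersManagerSketch:
        """Illustrative consumer of the timers_manager_connector convention."""

        def __init__(self, enabled_modules):
            self._connectors = []
            for module in enabled_modules:
                connector = getattr(module, "timers_manager_connector", None)
                if connector is None:
                    continue
                # Hand the manager back to the connector so it can report
                # timer_started/timer_stopped events later.
                connector.register_timers_manager(self)
                self._connectors.append(connector)

        def start_timers(self, data):
            for connector in self._connectors:
                connector.start_timer(data)

        def timer_started(self, module_id, data):
            print("Timer started by module", module_id, data)

        def timer_stopped(self, module_id):
            print("Timer stopped by module", module_id)
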
diff --git a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py
index d8e4d05580..1a76905b38 100644
--- a/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py
+++ b/openpype/modules/default_modules/ftrack/ftrack_server/event_server_cli.py
@@ -6,7 +6,6 @@ import subprocess
import socket
import json
import platform
-import argparse
import getpass
import atexit
import time
@@ -16,7 +15,10 @@ import ftrack_api
import pymongo
from openpype.lib import (
get_pype_execute_args,
- OpenPypeMongoConnection
+ OpenPypeMongoConnection,
+ get_openpype_version,
+ get_build_version,
+ validate_mongo_connection
)
from openpype_modules.ftrack import FTRACK_MODULE_DIR
from openpype_modules.ftrack.lib import credentials
@@ -35,11 +37,15 @@ class MongoPermissionsError(Exception):
def check_mongo_url(mongo_uri, log_error=False):
"""Checks if mongo server is responding"""
try:
- client = pymongo.MongoClient(mongo_uri)
- # Force connection on a request as the connect=True parameter of
- # MongoClient seems to be useless here
- client.server_info()
- client.close()
+ validate_mongo_connection(mongo_uri)
+
+ except pymongo.errors.InvalidURI as err:
+ if log_error:
+ print("Can't connect to MongoDB at {} because: {}".format(
+ mongo_uri, err
+ ))
+ return False
+
except pymongo.errors.ServerSelectionTimeoutError as err:
if log_error:
print("Can't connect to MongoDB at {} because: {}".format(
@@ -236,14 +242,16 @@ def main_loop(ftrack_url):
statuser_thread=statuser_thread
)
- system_name, pc_name = platform.uname()[:2]
host_name = socket.gethostname()
- main_info = {
- "created_at": datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S"),
- "Username": getpass.getuser(),
- "Host Name": host_name,
- "Host IP": socket.gethostbyname(host_name)
- }
+ main_info = [
+ ["created_at", datetime.datetime.now().strftime("%Y.%m.%d %H:%M:%S")],
+ ["Username", getpass.getuser()],
+ ["Host Name", host_name],
+ ["Host IP", socket.gethostbyname(host_name)],
+ ["OpenPype executable", get_pype_execute_args()[-1]],
+ ["OpenPype version", get_openpype_version() or "N/A"],
+ ["OpenPype build version", get_build_version() or "N/A"]
+ ]
main_info_str = json.dumps(main_info)
# Main loop
while True:
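
main_info is now a list of [label, value] pairs instead of a dict so the row order survives the JSON round trip to the status widget; sub_event_status.py below rebuilds it with collections.OrderedDict. A minimal sketch of that round trip with made-up values:

    import collections
    import json

    main_info = [
        ["created_at", "2021.06.01 12:00:00"],
        ["Username", "artist"],
        ["OpenPype version", "3.2.0"]
    ]
    payload = json.dumps(main_info)

    # Consumer side keeps the rows in the order they were produced.
    server_info = collections.OrderedDict(json.loads(payload))
    assert list(server_info) == ["created_at", "Username", "OpenPype version"]
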
diff --git a/openpype/modules/default_modules/ftrack/interfaces.py b/openpype/modules/default_modules/ftrack/interfaces.py
deleted file mode 100644
index 16ce0d2e62..0000000000
--- a/openpype/modules/default_modules/ftrack/interfaces.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from abc import abstractmethod
-from openpype.modules import OpenPypeInterface
-
-
-class IFtrackEventHandlerPaths(OpenPypeInterface):
- """Other modules interface to return paths to ftrack event handlers.
-
- Expected output is dictionary with "server" and "user" keys.
- """
- @abstractmethod
- def get_event_handler_paths(self):
- pass
diff --git a/openpype/modules/default_modules/ftrack/lib/__init__.py b/openpype/modules/default_modules/ftrack/lib/__init__.py
index 9dc2d67279..80b4db9dd6 100644
--- a/openpype/modules/default_modules/ftrack/lib/__init__.py
+++ b/openpype/modules/default_modules/ftrack/lib/__init__.py
@@ -3,10 +3,10 @@ from .constants import (
CUST_ATTR_AUTO_SYNC,
CUST_ATTR_GROUP,
CUST_ATTR_TOOLS,
- CUST_ATTR_APPLICATIONS
+ CUST_ATTR_APPLICATIONS,
+ CUST_ATTR_INTENT
)
-from . settings import (
- get_ftrack_url_from_settings,
+from .settings import (
get_ftrack_event_mongo_info
)
from .custom_attributes import (
@@ -31,7 +31,6 @@ __all__ = (
"CUST_ATTR_TOOLS",
"CUST_ATTR_APPLICATIONS",
- "get_ftrack_url_from_settings",
"get_ftrack_event_mongo_info",
"default_custom_attributes_definition",
diff --git a/openpype/modules/default_modules/ftrack/lib/constants.py b/openpype/modules/default_modules/ftrack/lib/constants.py
index 73d5112e6d..e6e2013d2b 100644
--- a/openpype/modules/default_modules/ftrack/lib/constants.py
+++ b/openpype/modules/default_modules/ftrack/lib/constants.py
@@ -10,3 +10,5 @@ CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
CUST_ATTR_APPLICATIONS = "applications"
# Environment tools custom attribute
CUST_ATTR_TOOLS = "tools_env"
+# Intent custom attribute name
+CUST_ATTR_INTENT = "intent"
diff --git a/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py
index 7027154d86..a457b886ac 100644
--- a/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py
+++ b/openpype/modules/default_modules/ftrack/lib/ftrack_base_handler.py
@@ -384,8 +384,8 @@ class BaseHandler(object):
)
def show_interface(
- self, items, title='',
- event=None, user=None, username=None, user_id=None
+ self, items, title="", event=None, user=None,
+ username=None, user_id=None, submit_btn_label=None
):
"""
Shows interface to user
@@ -428,14 +428,18 @@ class BaseHandler(object):
'applicationId=ftrack.client.web and user.id="{0}"'
).format(user_id)
+ event_data = {
+ "type": "widget",
+ "items": items,
+ "title": title
+ }
+ if submit_btn_label:
+ event_data["submit_button_label"] = submit_btn_label
+
self.session.event_hub.publish(
ftrack_api.event.base.Event(
topic='ftrack.action.trigger-user-interface',
- data=dict(
- type='widget',
- items=items,
- title=title
- ),
+ data=event_data,
target=target
),
on_error='ignore'
@@ -443,7 +447,7 @@ class BaseHandler(object):
def show_interface_from_dict(
self, messages, title="", event=None,
- user=None, username=None, user_id=None
+ user=None, username=None, user_id=None, submit_btn_label=None
):
if not messages:
self.log.debug("No messages to show! (messages dict is empty)")
@@ -469,7 +473,9 @@ class BaseHandler(object):
             message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
items.append(message)
- self.show_interface(items, title, event, user, username, user_id)
+ self.show_interface(
+ items, title, event, user, username, user_id, submit_btn_label
+ )
def trigger_action(
self, action_name, event=None, session=None,
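
With the new submit_btn_label argument, handlers can label the widget's submit button when popping a report to the user; show_interface_from_dict simply forwards it to show_interface. A hedged usage sketch from inside a handler method that has an event available (the messages content is illustrative):

    messages = {"Published components": "Review, Thumbnail"}
    self.show_interface_from_dict(
        messages,
        title="Publish report",
        event=event,
        submit_btn_label="Close report"
    )
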
diff --git a/openpype/modules/default_modules/ftrack/lib/settings.py b/openpype/modules/default_modules/ftrack/lib/settings.py
index 027356edc6..bf44981de0 100644
--- a/openpype/modules/default_modules/ftrack/lib/settings.py
+++ b/openpype/modules/default_modules/ftrack/lib/settings.py
@@ -1,13 +1,4 @@
import os
-from openpype.api import get_system_settings
-
-
-def get_ftrack_settings():
- return get_system_settings()["modules"]["ftrack"]
-
-
-def get_ftrack_url_from_settings():
- return get_ftrack_settings()["ftrack_server"]
def get_ftrack_event_mongo_info():
diff --git a/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py
index 51b45eb93b..d1e2e3aaeb 100644
--- a/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py
+++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_processor.py
@@ -13,6 +13,11 @@ from openpype_modules.ftrack.ftrack_server.lib import (
from openpype.modules import ModulesManager
from openpype.api import Logger
+from openpype.lib import (
+ get_openpype_version,
+ get_build_version
+)
+
import ftrack_api
@@ -40,9 +45,11 @@ def send_status(event):
new_event_data = {
"subprocess_id": subprocess_id,
"source": "processor",
- "status_info": {
- "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
- }
+ "status_info": [
+ ["created_at", subprocess_started.strftime("%Y.%m.%d %H:%M:%S")],
+ ["OpenPype version", get_openpype_version() or "N/A"],
+ ["OpenPype build version", get_build_version() or "N/A"]
+ ]
}
new_event = ftrack_api.event.base.Event(
diff --git a/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py
index 8a2733b635..004f61338c 100644
--- a/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py
+++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_status.py
@@ -2,6 +2,7 @@ import os
import sys
import json
import threading
+import collections
import signal
import socket
import datetime
@@ -165,7 +166,7 @@ class StatusFactory:
return
source = event["data"]["source"]
- data = event["data"]["status_info"]
+ data = collections.OrderedDict(event["data"]["status_info"])
self.update_status_info(source, data)
@@ -348,7 +349,7 @@ def heartbeat():
def main(args):
port = int(args[-1])
- server_info = json.loads(args[-2])
+ server_info = collections.OrderedDict(json.loads(args[-2]))
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
diff --git a/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py
index a8649e0ccc..5543ed74e2 100644
--- a/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py
+++ b/openpype/modules/default_modules/ftrack/scripts/sub_event_storer.py
@@ -14,7 +14,11 @@ from openpype_modules.ftrack.ftrack_server.lib import (
TOPIC_STATUS_SERVER_RESULT
)
from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info
-from openpype.lib import OpenPypeMongoConnection
+from openpype.lib import (
+ OpenPypeMongoConnection,
+ get_openpype_version,
+ get_build_version
+)
from openpype.api import Logger
log = Logger.get_logger("Event storer")
@@ -153,9 +157,11 @@ def send_status(event):
new_event_data = {
"subprocess_id": os.environ["FTRACK_EVENT_SUB_ID"],
"source": "storer",
- "status_info": {
- "created_at": subprocess_started.strftime("%Y.%m.%d %H:%M:%S")
- }
+ "status_info": [
+ ["created_at", subprocess_started.strftime("%Y.%m.%d %H:%M:%S")],
+ ["OpenPype version", get_openpype_version() or "N/A"],
+ ["OpenPype build version", get_build_version() or "N/A"]
+ ]
}
new_event = ftrack_api.event.base.Event(
diff --git a/openpype/modules/default_modules/ftrack/tray/login_dialog.py b/openpype/modules/default_modules/ftrack/tray/login_dialog.py
index 6384621c8e..05d9226ca4 100644
--- a/openpype/modules/default_modules/ftrack/tray/login_dialog.py
+++ b/openpype/modules/default_modules/ftrack/tray/login_dialog.py
@@ -25,7 +25,7 @@ class CredentialsDialog(QtWidgets.QDialog):
self._is_logged = False
self._in_advance_mode = False
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(
diff --git a/openpype/modules/default_modules/idle_manager/__init__.py b/openpype/modules/default_modules/idle_manager/__init__.py
deleted file mode 100644
index 9d6e10bf39..0000000000
--- a/openpype/modules/default_modules/idle_manager/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .idle_module import (
- IdleManager
-)
-
-
-__all__ = (
- "IdleManager",
-)
diff --git a/openpype/modules/default_modules/idle_manager/idle_module.py b/openpype/modules/default_modules/idle_manager/idle_module.py
deleted file mode 100644
index 1a6d71a961..0000000000
--- a/openpype/modules/default_modules/idle_manager/idle_module.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import platform
-import collections
-
-from openpype.modules import OpenPypeModule
-from openpype_interfaces import (
- ITrayService,
- IIdleManager
-)
-
-
-class IdleManager(OpenPypeModule, ITrayService):
- """ Measure user's idle time in seconds.
- Idle time resets on keyboard/mouse input.
- Is able to emit signals at specific time idle.
- """
- label = "Idle Service"
- name = "idle_manager"
-
- def initialize(self, module_settings):
- enabled = True
- # Ignore on MacOs
- # - pynput need root permissions and enabled access for application
- if platform.system().lower() == "darwin":
- enabled = False
- self.enabled = enabled
-
- self.time_callbacks = collections.defaultdict(list)
- self.idle_thread = None
-
- def tray_init(self):
- return
-
- def tray_start(self):
- if self.time_callbacks:
- self.start_thread()
-
- def tray_exit(self):
- self.stop_thread()
- try:
- self.time_callbacks = {}
- except Exception:
- pass
-
- def connect_with_modules(self, enabled_modules):
- for module in enabled_modules:
- if not isinstance(module, IIdleManager):
- continue
-
- module.idle_manager = self
- callbacks_items = module.callbacks_by_idle_time() or {}
- for emit_time, callbacks in callbacks_items.items():
- if not isinstance(callbacks, (tuple, list, set)):
- callbacks = [callbacks]
- self.time_callbacks[emit_time].extend(callbacks)
-
- @property
- def idle_time(self):
- if self.idle_thread and self.idle_thread.is_running:
- return self.idle_thread.idle_time
-
- def _create_thread(self):
- from .idle_threads import IdleManagerThread
-
- return IdleManagerThread(self)
-
- def start_thread(self):
- if self.idle_thread:
- self.idle_thread.stop()
- self.idle_thread.join()
- self.idle_thread = self._create_thread()
- self.idle_thread.start()
-
- def stop_thread(self):
- if self.idle_thread:
- self.idle_thread.stop()
- self.idle_thread.join()
-
- def on_thread_stop(self):
- self.set_service_failed_icon()
diff --git a/openpype/modules/default_modules/idle_manager/idle_threads.py b/openpype/modules/default_modules/idle_manager/idle_threads.py
deleted file mode 100644
index f19feddb77..0000000000
--- a/openpype/modules/default_modules/idle_manager/idle_threads.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import time
-import threading
-
-from pynput import mouse, keyboard
-
-from openpype.lib import PypeLogger
-
-
-class MouseThread(mouse.Listener):
- """Listens user's mouse movement."""
-
- def __init__(self, callback):
- super(MouseThread, self).__init__(on_move=self.on_move)
- self.callback = callback
-
- def on_move(self, posx, posy):
- self.callback()
-
-
-class KeyboardThread(keyboard.Listener):
- """Listens user's keyboard input."""
-
- def __init__(self, callback):
- super(KeyboardThread, self).__init__(on_press=self.on_press)
-
- self.callback = callback
-
- def on_press(self, key):
- self.callback()
-
-
-class IdleManagerThread(threading.Thread):
- def __init__(self, module, *args, **kwargs):
- super(IdleManagerThread, self).__init__(*args, **kwargs)
- self.log = PypeLogger.get_logger(self.__class__.__name__)
- self.module = module
- self.threads = []
- self.is_running = False
- self.idle_time = 0
-
- def stop(self):
- self.is_running = False
-
- def reset_time(self):
- self.idle_time = 0
-
- @property
- def time_callbacks(self):
- return self.module.time_callbacks
-
- def on_stop(self):
- self.is_running = False
- self.log.info("IdleManagerThread has stopped")
- self.module.on_thread_stop()
-
- def run(self):
- self.log.info("IdleManagerThread has started")
- self.is_running = True
- thread_mouse = MouseThread(self.reset_time)
- thread_keyboard = KeyboardThread(self.reset_time)
- thread_mouse.start()
- thread_keyboard.start()
- try:
- while self.is_running:
- if self.idle_time in self.time_callbacks:
- for callback in self.time_callbacks[self.idle_time]:
- thread = threading.Thread(target=callback)
- thread.start()
- self.threads.append(thread)
-
- for thread in tuple(self.threads):
- if not thread.isAlive():
- thread.join()
- self.threads.remove(thread)
-
- self.idle_time += 1
- time.sleep(1)
-
- except Exception:
- self.log.warning(
- 'Idle Manager service has failed', exc_info=True
- )
-
- # Threads don't have their attrs when Qt application already finished
- try:
- thread_mouse.stop()
- thread_mouse.join()
- except AttributeError:
- pass
-
- try:
- thread_keyboard.stop()
- thread_keyboard.join()
- except AttributeError:
- pass
-
- self.on_stop()
diff --git a/openpype/modules/default_modules/idle_manager/interfaces.py b/openpype/modules/default_modules/idle_manager/interfaces.py
deleted file mode 100644
index 71cd17a64a..0000000000
--- a/openpype/modules/default_modules/idle_manager/interfaces.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from abc import abstractmethod
-from openpype.modules import OpenPypeInterface
-
-
-class IIdleManager(OpenPypeInterface):
- """Other modules interface to return callbacks by idle time in seconds.
-
- Expected output is dictionary with seconds as keys and callback/s
- as value, value may be callback of list of callbacks.
- EXAMPLE:
- ```
- {
- 60: self.on_minute_idle
- }
- ```
- """
- idle_manager = None
-
- @abstractmethod
- def callbacks_by_idle_time(self):
- pass
-
- @property
- def idle_time(self):
- if self.idle_manager:
- return self.idle_manager.idle_time
diff --git a/openpype/modules/default_modules/log_viewer/log_view_module.py b/openpype/modules/default_modules/log_viewer/log_view_module.py
index bc1a98f4ad..14be6b392e 100644
--- a/openpype/modules/default_modules/log_viewer/log_view_module.py
+++ b/openpype/modules/default_modules/log_viewer/log_view_module.py
@@ -40,10 +40,6 @@ class LogViewModule(OpenPypeModule, ITrayModule):
def tray_exit(self):
return
- def connect_with_modules(self, _enabled_modules):
- """Nothing special."""
- return
-
def _show_logs_gui(self):
if self.window:
self.window.show()
diff --git a/openpype/modules/default_modules/muster/muster.py b/openpype/modules/default_modules/muster/muster.py
index a0e72006af..6e26ad2d7b 100644
--- a/openpype/modules/default_modules/muster/muster.py
+++ b/openpype/modules/default_modules/muster/muster.py
@@ -3,13 +3,10 @@ import json
import appdirs
import requests
from openpype.modules import OpenPypeModule
-from openpype_interfaces import (
- ITrayModule,
- IWebServerRoutes
-)
+from openpype_interfaces import ITrayModule
-class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
+class MusterModule(OpenPypeModule, ITrayModule):
"""
Module handling Muster Render credentials. This will display dialog
asking for user credentials for Muster if not already specified.
@@ -54,9 +51,6 @@ class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
"""Nothing special for Muster."""
return
- def connect_with_modules(self, *_a, **_kw):
- return
-
# Definition of Tray menu
def tray_menu(self, parent):
"""Add **change credentials** option to tray menu."""
@@ -76,13 +70,6 @@ class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
parent.addMenu(menu)
- def webserver_initialization(self, server_manager):
- """Implementation of IWebServerRoutes interface."""
- if self.tray_initialized:
- from .rest_api import MusterModuleRestApi
-
- self.rest_api_obj = MusterModuleRestApi(self, server_manager)
-
def load_credentials(self):
"""
Get credentials from JSON file
@@ -142,6 +129,14 @@ class MusterModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
if self.widget_login:
self.widget_login.show()
+ # Webserver module implementation
+ def webserver_initialization(self, server_manager):
+ """Add routes for Muster login."""
+ if self.tray_initialized:
+ from .rest_api import MusterModuleRestApi
+
+ self.rest_api_obj = MusterModuleRestApi(self, server_manager)
+
def _requests_post(self, *args, **kwargs):
""" Wrapper for requests, disabling SSL certificate validation if
DONT_VERIFY_SSL environment variable is found. This is useful when
diff --git a/openpype/modules/default_modules/muster/widget_login.py b/openpype/modules/default_modules/muster/widget_login.py
index 231b52c6bd..ae838c6cea 100644
--- a/openpype/modules/default_modules/muster/widget_login.py
+++ b/openpype/modules/default_modules/muster/widget_login.py
@@ -17,7 +17,7 @@ class MusterLogin(QtWidgets.QWidget):
self.module = module
# Icon
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(
diff --git a/openpype/modules/default_modules/project_manager_action.py b/openpype/modules/default_modules/project_manager_action.py
index c1f984a8cb..251964a059 100644
--- a/openpype/modules/default_modules/project_manager_action.py
+++ b/openpype/modules/default_modules/project_manager_action.py
@@ -17,9 +17,6 @@ class ProjectManagerAction(OpenPypeModule, ITrayAction):
# Tray attributes
self.project_manager_window = None
- def connect_with_modules(self, *_a, **_kw):
- return
-
def tray_init(self):
"""Initialization in tray implementation of ITrayAction."""
self.create_project_manager_window()
diff --git a/openpype/modules/default_modules/python_console_interpreter/module.py b/openpype/modules/default_modules/python_console_interpreter/module.py
index f4df3fb6d8..8c4a2fba73 100644
--- a/openpype/modules/default_modules/python_console_interpreter/module.py
+++ b/openpype/modules/default_modules/python_console_interpreter/module.py
@@ -18,9 +18,6 @@ class PythonInterpreterAction(OpenPypeModule, ITrayAction):
if self._interpreter_window is not None:
self._interpreter_window.save_registry()
- def connect_with_modules(self, *args, **kwargs):
- pass
-
def create_interpreter_window(self):
"""Initializa Settings Qt window."""
if self._interpreter_window:
diff --git a/openpype/modules/default_modules/python_console_interpreter/window/widgets.py b/openpype/modules/default_modules/python_console_interpreter/window/widgets.py
index 975decf4f4..0e8dd2fb9b 100644
--- a/openpype/modules/default_modules/python_console_interpreter/window/widgets.py
+++ b/openpype/modules/default_modules/python_console_interpreter/window/widgets.py
@@ -331,7 +331,7 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
super(PythonInterpreterWidget, self).__init__(parent)
self.setWindowTitle("OpenPype Console")
- self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
+ self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
self.ansi_escape = re.compile(
r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]"
@@ -387,8 +387,6 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
self.setStyleSheet(load_stylesheet())
- self.resize(self.default_width, self.default_height)
-
self._init_from_registry()
if self._tab_widget.count() < 1:
@@ -396,16 +394,23 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
def _init_from_registry(self):
setting_registry = PythonInterpreterRegistry()
-
+ width = None
+ height = None
try:
width = setting_registry.get_item("width")
height = setting_registry.get_item("height")
- if width is not None and height is not None:
- self.resize(width, height)
except ValueError:
pass
+ if width is None or width < 200:
+ width = self.default_width
+
+ if height is None or height < 200:
+ height = self.default_height
+
+ self.resize(width, height)
+
try:
sizes = setting_registry.get_item("splitter_sizes")
if len(sizes) == len(self._widgets_splitter.sizes()):
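
The registry block above now treats a missing or implausibly small stored size (under 200 px) as "use the class defaults" instead of skipping the resize entirely. The same rule as a standalone helper, purely illustrative:

    def _usable_dimension(stored, default, minimum=200):
        """Return the stored size when usable, otherwise the default."""
        if stored is None or stored < minimum:
            return default
        return stored

    # e.g. self.resize(_usable_dimension(width, self.default_width),
    #                  _usable_dimension(height, self.default_height))
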
diff --git a/openpype/modules/default_modules/settings_module/settings_action.py b/openpype/modules/default_modules/settings_module/settings_action.py
index 7140c57bab..2b4b51e3ad 100644
--- a/openpype/modules/default_modules/settings_module/settings_action.py
+++ b/openpype/modules/default_modules/settings_module/settings_action.py
@@ -19,9 +19,6 @@ class SettingsAction(OpenPypeModule, ITrayAction):
# Tray attributes
self.settings_window = None
- def connect_with_modules(self, *_a, **_kw):
- return
-
def tray_init(self):
"""Initialization in tray implementation of ITrayAction."""
self.create_settings_window()
@@ -84,9 +81,6 @@ class LocalSettingsAction(OpenPypeModule, ITrayAction):
self.settings_window = None
self._first_trigger = True
- def connect_with_modules(self, *_a, **_kw):
- return
-
def tray_init(self):
"""Initialization in tray implementation of ITrayAction."""
self.create_settings_window()
diff --git a/openpype/modules/default_modules/slack/slack_module.py b/openpype/modules/default_modules/slack/slack_module.py
index e3f7b4ad19..9b2976d766 100644
--- a/openpype/modules/default_modules/slack/slack_module.py
+++ b/openpype/modules/default_modules/slack/slack_module.py
@@ -17,10 +17,6 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths):
slack_settings = modules_settings[self.name]
self.enabled = slack_settings["enabled"]
- def connect_with_modules(self, _enabled_modules):
- """Nothing special."""
- return
-
def get_launch_hook_paths(self):
"""Implementation of `ILaunchHookPaths`."""
return os.path.join(SLACK_MODULE_DIR, "launch_hooks")
diff --git a/openpype/modules/default_modules/sync_server/providers/abstract_provider.py b/openpype/modules/default_modules/sync_server/providers/abstract_provider.py
index 2e9632134c..8ae0ceed79 100644
--- a/openpype/modules/default_modules/sync_server/providers/abstract_provider.py
+++ b/openpype/modules/default_modules/sync_server/providers/abstract_provider.py
@@ -29,13 +29,35 @@ class AbstractProvider:
@classmethod
@abc.abstractmethod
- def get_configurable_items(cls):
+ def get_system_settings_schema(cls):
"""
- Returns filtered dict of editable properties
+        Returns list of dicts for editable properties on system settings level
Returns:
- (dict)
+ (list) of dict
+ """
+
+ @classmethod
+ @abc.abstractmethod
+ def get_project_settings_schema(cls):
+ """
+        Returns list of dicts for editable properties on project settings level
+
+
+ Returns:
+ (list) of dict
+ """
+
+ @classmethod
+ @abc.abstractmethod
+ def get_local_settings_schema(cls):
+ """
+        Returns list of dicts for editable properties on local settings level
+
+
+ Returns:
+ (list) of dict
"""
@abc.abstractmethod
@@ -58,7 +80,8 @@ class AbstractProvider:
representation (dict): complete repre containing 'file'
site (str): site name
Returns:
- (string) file_id of created file, raises exception
+            (string) file_id of created/modified file,
+                raises FileExistsError or FileNotFoundError
"""
pass
@@ -81,7 +104,8 @@ class AbstractProvider:
representation (dict): complete repre containing 'file'
site (str): site name
Returns:
- None
+            (string) file_id of created/modified file,
+                raises FileExistsError or FileNotFoundError
"""
pass
diff --git a/openpype/modules/default_modules/sync_server/providers/dropbox.py b/openpype/modules/default_modules/sync_server/providers/dropbox.py
new file mode 100644
index 0000000000..0d735a0b59
--- /dev/null
+++ b/openpype/modules/default_modules/sync_server/providers/dropbox.py
@@ -0,0 +1,423 @@
+import os
+
+import dropbox
+
+from openpype.api import Logger
+from .abstract_provider import AbstractProvider
+from ..utils import EditableScopes
+
+log = Logger().get_logger("SyncServer")
+
+
+class DropboxHandler(AbstractProvider):
+ CODE = 'dropbox'
+ LABEL = 'Dropbox'
+
+ def __init__(self, project_name, site_name, tree=None, presets=None):
+        self.active = False
+        self.dbx = None
+        self.site_name = site_name
+ self.presets = presets
+
+ if not self.presets:
+ log.info(
+ "Sync Server: There are no presets for {}.".format(site_name)
+ )
+ return
+
+ provider_presets = self.presets.get(self.CODE)
+ if not provider_presets:
+ msg = "Sync Server: No provider presets for {}".format(self.CODE)
+ log.info(msg)
+ return
+
+ token = self.presets[self.CODE].get("token", "")
+ if not token:
+ msg = "Sync Server: No access token for dropbox provider"
+ log.info(msg)
+ return
+
+ team_folder_name = self.presets[self.CODE].get("team_folder_name", "")
+ if not team_folder_name:
+ msg = "Sync Server: No team folder name for dropbox provider"
+ log.info(msg)
+ return
+
+ acting_as_member = self.presets[self.CODE].get("acting_as_member", "")
+ if not acting_as_member:
+ msg = (
+ "Sync Server: No acting member for dropbox provider"
+ )
+ log.info(msg)
+ return
+
+ try:
+ self.dbx = self._get_service(
+ token, acting_as_member, team_folder_name
+ )
+ except Exception as e:
+ log.info("Could not establish dropbox object: {}".format(e))
+ return
+
+        super(DropboxHandler, self).__init__()
+
+ @classmethod
+ def get_system_settings_schema(cls):
+ """
+ Returns dict for editable properties on system settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ return []
+
+ @classmethod
+ def get_project_settings_schema(cls):
+ """
+ Returns dict for editable properties on project settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ # {platform} tells that value is multiplatform and only specific OS
+ # should be returned
+ return [
+ {
+ "type": "text",
+ "key": "token",
+ "label": "Access Token"
+ },
+ {
+ "type": "text",
+ "key": "team_folder_name",
+ "label": "Team Folder Name"
+ },
+ {
+ "type": "text",
+ "key": "acting_as_member",
+ "label": "Acting As Member"
+ },
+        # roots could be overridden only on Project level, User cannot
+ {
+ 'key': "roots",
+ 'label': "Roots",
+ 'type': 'dict'
+ }
+ ]
+
+ @classmethod
+ def get_local_settings_schema(cls):
+ """
+ Returns dict for editable properties on local settings level
+
+
+ Returns:
+            (list) of dict
+ """
+ return []
+
+ def _get_service(self, token, acting_as_member, team_folder_name):
+ dbx = dropbox.DropboxTeam(token)
+
+ # Getting member id.
+ member_id = None
+ member_names = []
+ for member in dbx.team_members_list().members:
+ member_names.append(member.profile.name.display_name)
+ if member.profile.name.display_name == acting_as_member:
+ member_id = member.profile.team_member_id
+
+ if member_id is None:
+ raise ValueError(
+ "Could not find member \"{}\". Available members: {}".format(
+ acting_as_member, member_names
+ )
+ )
+
+ # Getting team folder id.
+ team_folder_id = None
+ team_folder_names = []
+ for entry in dbx.team_team_folder_list().team_folders:
+ team_folder_names.append(entry.name)
+ if entry.name == team_folder_name:
+ team_folder_id = entry.team_folder_id
+
+ if team_folder_id is None:
+ raise ValueError(
+ "Could not find team folder \"{}\". Available folders: "
+ "{}".format(
+ team_folder_name, team_folder_names
+ )
+ )
+
+ # Establish dropbox object.
+ path_root = dropbox.common.PathRoot.namespace_id(team_folder_id)
+ return dropbox.DropboxTeam(
+ token
+ ).with_path_root(path_root).as_user(member_id)
+
+ def is_active(self):
+ """
+ Returns True if provider is activated, eg. has working credentials.
+ Returns:
+ (boolean)
+ """
+ return self.dbx is not None
+
+ @classmethod
+ def get_configurable_items(cls):
+ """
+ Returns filtered dict of editable properties
+
+
+ Returns:
+ (dict)
+ """
+ editable = {
+ 'token': {
+ 'scope': [EditableScopes.PROJECT],
+ 'label': "Access Token",
+ 'type': 'text',
+ 'namespace': (
+ '{project_settings}/global/sync_server/sites/{site}/token'
+ )
+ },
+ 'team_folder_name': {
+ 'scope': [EditableScopes.PROJECT],
+ 'label': "Team Folder Name",
+ 'type': 'text',
+ 'namespace': (
+ '{project_settings}/global/sync_server/sites/{site}'
+ '/team_folder_name'
+ )
+ },
+ 'acting_as_member': {
+ 'scope': [EditableScopes.PROJECT, EditableScopes.LOCAL],
+ 'label': "Acting As Member",
+ 'type': 'text',
+ 'namespace': (
+ '{project_settings}/global/sync_server/sites/{site}'
+ '/acting_as_member'
+ )
+ }
+ }
+ return editable
+
+ def _path_exists(self, path):
+ try:
+ entries = self.dbx.files_list_folder(
+ path=os.path.dirname(path)
+ ).entries
+ except dropbox.exceptions.ApiError:
+ return False
+
+ for entry in entries:
+ if entry.name == os.path.basename(path):
+ return True
+
+ return False
+
+ def upload_file(self, source_path, path,
+ server, collection, file, representation, site,
+ overwrite=False):
+ """
+ Copy file from 'source_path' to 'target_path' on provider.
+ Use 'overwrite' boolean to rewrite existing file on provider
+
+ Args:
+ source_path (string):
+ path (string): absolute path with or without name of the file
+ overwrite (boolean): replace existing file
+
+ arguments for saving progress:
+ server (SyncServer): server instance to call update_db on
+ collection (str): name of collection
+ file (dict): info about uploaded file (matches structure from db)
+ representation (dict): complete repre containing 'file'
+ site (str): site name
+ Returns:
+ (string) file_id of created file, raises exception
+ """
+ # Check source path.
+ if not os.path.exists(source_path):
+ raise FileNotFoundError(
+ "Source file {} doesn't exist.".format(source_path)
+ )
+
+ if self._path_exists(path) and not overwrite:
+ raise FileExistsError(
+ "File already exists, use 'overwrite' argument"
+ )
+
+ mode = dropbox.files.WriteMode("add", None)
+ if overwrite:
+ mode = dropbox.files.WriteMode.overwrite
+
+ with open(source_path, "rb") as f:
+ self.dbx.files_upload(f.read(), path, mode=mode)
+
+ server.update_db(
+ collection=collection,
+ new_file_id=None,
+ file=file,
+ representation=representation,
+ site=site,
+ progress=100
+ )
+
+ return path
+
+ def download_file(self, source_path, local_path,
+ server, collection, file, representation, site,
+ overwrite=False):
+ """
+ Download file from provider into local system
+
+ Args:
+ source_path (string): absolute path on provider
+ local_path (string): absolute path with or without name of the file
+ overwrite (boolean): replace existing file
+
+ arguments for saving progress:
+ server (SyncServer): server instance to call update_db on
+ collection (str): name of collection
+ file (dict): info about uploaded file (matches structure from db)
+ representation (dict): complete repre containing 'file'
+ site (str): site name
+ Returns:
+ None
+ """
+ # Check source path.
+ if not self._path_exists(source_path):
+ raise FileNotFoundError(
+ "Source file {} doesn't exist.".format(source_path)
+ )
+
+ if os.path.exists(local_path) and not overwrite:
+ raise FileExistsError(
+ "File already exists, use 'overwrite' argument"
+ )
+
+ if os.path.exists(local_path) and overwrite:
+ os.unlink(local_path)
+
+ self.dbx.files_download_to_file(local_path, source_path)
+
+ server.update_db(
+ collection=collection,
+ new_file_id=None,
+ file=file,
+ representation=representation,
+ site=site,
+ progress=100
+ )
+
+ return os.path.basename(source_path)
+
+ def delete_file(self, path):
+ """
+ Deletes file from 'path'. Expects path to specific file.
+
+ Args:
+ path (string): absolute path to particular file
+
+ Returns:
+ None
+ """
+ if not self._path_exists(path):
+            raise FileNotFoundError("File {} doesn't exist".format(path))
+
+ self.dbx.files_delete(path)
+
+ def list_folder(self, folder_path):
+ """
+ List all files and subfolders of particular path non-recursively.
+ Args:
+            folder_path (string): absolute path on provider
+
+ Returns:
+ (list)
+ """
+ if not self._path_exists(folder_path):
+            raise FileNotFoundError(
+ "Folder \"{}\" does not exist".format(folder_path)
+ )
+
+ entry_names = []
+ for entry in self.dbx.files_list_folder(path=folder_path).entries:
+ entry_names.append(entry.name)
+ return entry_names
+
+ def create_folder(self, folder_path):
+ """
+ Create all nonexistent folders and subfolders in 'path'.
+
+ Args:
+ path (string): absolute path
+
+ Returns:
+ (string) folder id of lowest subfolder from 'path'
+ """
+ if self._path_exists(folder_path):
+ return folder_path
+
+ self.dbx.files_create_folder_v2(folder_path)
+
+ return folder_path
+
+ def get_tree(self):
+ """
+ Creates folder structure for providers which do not provide
+ tree folder structure (GDrive has no accessible tree structure,
+ only parents and their parents)
+ """
+ pass
+
+ def get_roots_config(self, anatomy=None):
+ """
+ Returns root values for path resolving
+
+ Takes value from Anatomy which takes values from Settings
+ overridden by Local Settings
+
+ Returns:
+ (dict) - {"root": {"root": "/My Drive"}}
+ OR
+                  {"root": {"root_ONE": "value", "root_TWO": "value"}}
+            Format is important for usage of python's format ** approach
+ """
+ return self.presets['root']
+
+ def resolve_path(self, path, root_config=None, anatomy=None):
+ """
+ Replaces all root placeholders with proper values
+
+ Args:
+ path(string): root[work]/folder...
+ root_config (dict): {'work': "c:/..."...}
+ anatomy (Anatomy): object of Anatomy
+ Returns:
+ (string): proper url
+ """
+ if not root_config:
+ root_config = self.get_roots_config(anatomy)
+
+ if root_config and not root_config.get("root"):
+ root_config = {"root": root_config}
+
+ try:
+ if not root_config:
+ raise KeyError
+
+ path = path.format(**root_config)
+ except KeyError:
+ try:
+ path = anatomy.fill_root(path)
+ except KeyError:
+ msg = "Error in resolving local root from anatomy"
+ log.error(msg)
+ raise ValueError(msg)
+
+ return path
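
For orientation, DropboxHandler expects its presets nested under the provider code, with the roots mapping available under "root" for get_roots_config/resolve_path. A hedged instantiation sketch; the token, folder, member and root values are placeholders, and in production the presets come from the Sync Server settings rather than being built by hand:

    presets = {
        "dropbox": {
            "token": "<dropbox-team-access-token>",
            "team_folder_name": "Projects",
            "acting_as_member": "Pipeline Bot"
        },
        "root": {"root": "/projects"}
    }
    handler = DropboxHandler("my_project", "studio_dropbox", presets=presets)
    if handler.is_active():
        print(handler.list_folder("/projects"))
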
diff --git a/openpype/modules/default_modules/sync_server/providers/gdrive.py b/openpype/modules/default_modules/sync_server/providers/gdrive.py
index 18d679b833..0aabd9fbcd 100644
--- a/openpype/modules/default_modules/sync_server/providers/gdrive.py
+++ b/openpype/modules/default_modules/sync_server/providers/gdrive.py
@@ -8,7 +8,7 @@ import platform
from openpype.api import Logger
from openpype.api import get_system_settings
from .abstract_provider import AbstractProvider
-from ..utils import time_function, ResumableError, EditableScopes
+from ..utils import time_function, ResumableError
log = Logger().get_logger("SyncServer")
@@ -61,7 +61,6 @@ class GDriveHandler(AbstractProvider):
CHUNK_SIZE = 2097152 # must be divisible by 256! used for upload chunks
def __init__(self, project_name, site_name, tree=None, presets=None):
- self.presets = None
self.active = False
self.project_name = project_name
self.site_name = site_name
@@ -74,7 +73,13 @@ class GDriveHandler(AbstractProvider):
format(site_name))
return
- cred_path = self.presets.get("credentials_url", {}).\
+ provider_presets = self.presets.get(self.CODE)
+ if not provider_presets:
+ msg = "Sync Server: No provider presets for {}".format(self.CODE)
+ log.info(msg)
+ return
+
+ cred_path = self.presets[self.CODE].get("credentials_url", {}).\
get(platform.system().lower()) or ''
if not os.path.exists(cred_path):
msg = "Sync Server: No credentials for gdrive provider " + \
@@ -96,30 +101,61 @@ class GDriveHandler(AbstractProvider):
return self.service is not None
@classmethod
- def get_configurable_items(cls):
+ def get_system_settings_schema(cls):
"""
- Returns filtered dict of editable properties.
+ Returns dict for editable properties on system settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ return []
+
+ @classmethod
+ def get_project_settings_schema(cls):
+ """
+ Returns dict for editable properties on project settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ # {platform} tells that value is multiplatform and only specific OS
+ # should be returned
+ editable = [
+        # credentials could be overridden on Project or User level
+ {
+ 'key': "credentials_url",
+ 'label': "Credentials url",
+ 'type': 'text'
+ },
+        # roots could be overridden only on Project level, User cannot
+ {
+ 'key': "roots",
+ 'label': "Roots",
+ 'type': 'dict'
+ }
+ ]
+ return editable
+
+ @classmethod
+ def get_local_settings_schema(cls):
+ """
+ Returns dict for editable properties on local settings level
Returns:
(dict)
"""
- # {platform} tells that value is multiplatform and only specific OS
- # should be returned
- editable = {
+ editable = [
# credentials could be override on Project or User level
- 'credentials_url': {
- 'scope': [EditableScopes.PROJECT,
- EditableScopes.LOCAL],
+ {
+ 'key': "credentials_url",
'label': "Credentials url",
'type': 'text',
'namespace': '{project_settings}/global/sync_server/sites/{site}/credentials_url/{platform}' # noqa: E501
- },
- # roots could be override only on Project leve, User cannot
- 'root': {'scope': [EditableScopes.PROJECT],
- 'label': "Roots",
- 'type': 'dict'}
- }
+ }
+ ]
return editable
def get_roots_config(self, anatomy=None):
diff --git a/openpype/modules/default_modules/sync_server/providers/lib.py b/openpype/modules/default_modules/sync_server/providers/lib.py
index 816ccca981..3daee366cf 100644
--- a/openpype/modules/default_modules/sync_server/providers/lib.py
+++ b/openpype/modules/default_modules/sync_server/providers/lib.py
@@ -1,5 +1,7 @@
from .gdrive import GDriveHandler
+from .dropbox import DropboxHandler
from .local_drive import LocalDriveHandler
+from .sftp import SFTPHandler
class ProviderFactory:
@@ -76,6 +78,14 @@ class ProviderFactory:
return provider_info[0].get_configurable_items()
+ def get_provider_cls(self, provider_code):
+ """
+ Returns class object for 'provider_code' to run class methods on.
+ """
+ provider_info = self._get_creator_info(provider_code)
+
+ return provider_info[0]
+
def _get_creator_info(self, provider):
"""
Collect all necessary info for provider. Currently only creator
@@ -103,4 +113,6 @@ factory = ProviderFactory()
# 7 denotes number of files that could be synced in single loop - learned by
# trial and error
factory.register_provider(GDriveHandler.CODE, GDriveHandler, 7)
+factory.register_provider(DropboxHandler.CODE, DropboxHandler, 10)
factory.register_provider(LocalDriveHandler.CODE, LocalDriveHandler, 50)
+factory.register_provider(SFTPHandler.CODE, SFTPHandler, 20)
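
get_provider_cls exists so that settings code can read the new per-level schemas as classmethods, without instantiating a provider (which needs working credentials). A short usage sketch; the import path is assumed from how other modules in this diff reference openpype_modules:

    from openpype_modules.sync_server.providers.lib import factory

    provider_cls = factory.get_provider_cls("dropbox")
    for item in provider_cls.get_project_settings_schema():
        print(item["key"], item["type"], item.get("label"))
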
diff --git a/openpype/modules/default_modules/sync_server/providers/local_drive.py b/openpype/modules/default_modules/sync_server/providers/local_drive.py
index 4b80ed44f2..8e5f170bc9 100644
--- a/openpype/modules/default_modules/sync_server/providers/local_drive.py
+++ b/openpype/modules/default_modules/sync_server/providers/local_drive.py
@@ -7,8 +7,6 @@ import time
from openpype.api import Logger, Anatomy
from .abstract_provider import AbstractProvider
-from ..utils import EditableScopes
-
log = Logger().get_logger("SyncServer")
@@ -30,18 +28,51 @@ class LocalDriveHandler(AbstractProvider):
return True
@classmethod
- def get_configurable_items(cls):
+ def get_system_settings_schema(cls):
"""
- Returns filtered dict of editable properties
+ Returns dict for editable properties on system settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ return []
+
+ @classmethod
+ def get_project_settings_schema(cls):
+ """
+ Returns dict for editable properties on project settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ # for non 'studio' sites, 'studio' is configured in Anatomy
+ editable = [
+ {
+ 'key': "roots",
+ 'label': "Roots",
+ 'type': 'dict'
+ }
+ ]
+ return editable
+
+ @classmethod
+ def get_local_settings_schema(cls):
+ """
+ Returns dict for editable properties on local settings level
+
Returns:
(dict)
"""
- editable = {
- 'root': {'scope': [EditableScopes.LOCAL],
- 'label': "Roots",
- 'type': 'dict'}
- }
+ editable = [
+ {
+ 'key': "roots",
+ 'label': "Roots",
+ 'type': 'dict'
+ }
+ ]
return editable
def upload_file(self, source_path, target_path,
diff --git a/openpype/modules/default_modules/sync_server/providers/resources/dropbox.png b/openpype/modules/default_modules/sync_server/providers/resources/dropbox.png
new file mode 100644
index 0000000000..6f56e3335b
Binary files /dev/null and b/openpype/modules/default_modules/sync_server/providers/resources/dropbox.png differ
diff --git a/openpype/modules/default_modules/sync_server/providers/resources/sftp.png b/openpype/modules/default_modules/sync_server/providers/resources/sftp.png
new file mode 100644
index 0000000000..56c7a5cca3
Binary files /dev/null and b/openpype/modules/default_modules/sync_server/providers/resources/sftp.png differ
diff --git a/openpype/modules/default_modules/sync_server/providers/sftp.py b/openpype/modules/default_modules/sync_server/providers/sftp.py
new file mode 100644
index 0000000000..07450265e2
--- /dev/null
+++ b/openpype/modules/default_modules/sync_server/providers/sftp.py
@@ -0,0 +1,461 @@
+import os
+import os.path
+import time
+import sys
+import six
+import threading
+import platform
+
+from openpype.api import Logger
+from openpype.api import get_system_settings
+from .abstract_provider import AbstractProvider
+log = Logger().get_logger("SyncServer")
+
+pysftp = None
+try:
+ import pysftp
+except (ImportError, SyntaxError):
+ # handle imports from Python 2 hosts - in those only basic methods are used
+ log.warning("Import failed, imported from Python 2, operations will fail.")
+
+
+class SFTPHandler(AbstractProvider):
+ """
+ Implementation of SFTP API.
+
+ Authentication could be done in 2 ways:
+ - user and password
+ - ssh key file for user (optionally password for ssh key)
+
+ Settings could be overwritten per project.
+
+ """
+ CODE = 'sftp'
+ LABEL = 'SFTP'
+
+ def __init__(self, project_name, site_name, tree=None, presets=None):
+ self.presets = None
+ self.active = False
+ self.project_name = project_name
+ self.site_name = site_name
+ self.root = None
+ self._conn = None
+
+ self.presets = presets
+ if not self.presets:
+ log.warning("Sync Server: There are no presets for {}.".
+ format(site_name))
+ return
+
+ provider_presets = self.presets.get(self.CODE)
+ if not provider_presets:
+ msg = "Sync Server: No provider presets for {}".format(self.CODE)
+ log.warning(msg)
+ return
+
+ # store to instance for reconnect
+ self.sftp_host = provider_presets["sftp_host"]
+ self.sftp_port = provider_presets["sftp_port"]
+ self.sftp_user = provider_presets["sftp_user"]
+ self.sftp_pass = provider_presets["sftp_pass"]
+ self.sftp_key = provider_presets["sftp_key"]
+ self.sftp_key_pass = provider_presets["sftp_key_pass"]
+
+ self._tree = None
+ self.active = True
+
+ @property
+ def conn(self):
+ """SFTP connection, cannot be used in all places though."""
+ if not self._conn:
+ self._conn = self._get_conn()
+
+ return self._conn
+
+ def is_active(self):
+ """
+ Returns True if provider is activated, eg. has working credentials.
+ Returns:
+ (boolean)
+ """
+ return self.conn is not None
+
+ @classmethod
+ def get_system_settings_schema(cls):
+ """
+ Returns dict for editable properties on system settings level
+
+
+ Returns:
+ (list) of dict
+ """
+ return []
+
+ @classmethod
+ def get_project_settings_schema(cls):
+ """
+ Returns dict for editable properties on project settings level
+
+ Currently not implemented in Settings yet!
+
+ Returns:
+ (list) of dict
+ """
+ # {platform} tells that value is multiplatform and only specific OS
+ # should be returned
+ editable = [
+        # credentials could be overridden on Project or User level
+ {
+            'key': "sftp_host",
+ 'label': "SFTP host name",
+ 'type': 'text'
+ },
+ {
+ "type": "number",
+ "key": "sftp_port",
+ "label": "SFTP port"
+ },
+ {
+ 'key': "sftp_user",
+ 'label': "SFTP user name",
+ 'type': 'text'
+ },
+ {
+ 'key': "sftp_pass",
+ 'label': "SFTP password",
+ 'type': 'text'
+ },
+ {
+ 'key': "sftp_key",
+ 'label': "SFTP user ssh key",
+ 'type': 'path'
+ },
+ {
+ 'key': "sftp_key_pass",
+ 'label': "SFTP user ssh key password",
+ 'type': 'text'
+ },
+        # roots could be overridden only on Project level, User cannot
+ {
+ 'key': "roots",
+ 'label': "Roots",
+ 'type': 'dict'
+ }
+ ]
+ return editable
+
+ @classmethod
+ def get_local_settings_schema(cls):
+ """
+ Returns dict for editable properties on local settings level
+
+ Currently not implemented in Settings yet!
+
+ Returns:
+            (list) of dict
+ """
+ editable = [
+        # credentials could be overridden on Project or User level
+ {
+ 'key': "sftp_user",
+ 'label': "SFTP user name",
+ 'type': 'text'
+ },
+ {
+ 'key': "sftp_pass",
+ 'label': "SFTP password",
+ 'type': 'text'
+ },
+ {
+ 'key': "sftp_key",
+ 'label': "SFTP user ssh key",
+ 'type': 'path'
+ },
+ {
+ 'key': "sftp_key_pass",
+ 'label': "SFTP user ssh key password",
+ 'type': 'text'
+ }
+ ]
+ return editable
+
+ def get_roots_config(self, anatomy=None):
+ """
+ Returns root values for path resolving
+
+        Use only Settings as roots cannot be modified by Local Settings
+
+ Returns:
+ (dict) - {"root": {"root": "/My Drive"}}
+ OR
+                  {"root": {"root_ONE": "value", "root_TWO": "value"}}
+            Format is important for usage of python's format ** approach
+ """
+ # roots cannot be locally overridden
+ return self.presets['root']
+
+ def get_tree(self):
+ """
+ Building of the folder tree could be potentially expensive,
+ constructor provides argument that could inject previously created
+ tree.
+ Tree structure must be handled in thread safe fashion!
+ Returns:
+ (dictionary) - url to id mapping
+ """
+ # not needed in this provider
+ pass
+
+ def create_folder(self, path):
+ """
+ Create all nonexistent folders and subfolders in 'path'.
+ Updates self._tree structure with new paths
+
+ Args:
+            path (string): absolute path, starts with site root,
+ without filename
+ Returns:
+ (string) folder id of lowest subfolder from 'path'
+ """
+ self.conn.makedirs(path)
+
+ return os.path.basename(path)
+
+ def upload_file(self, source_path, target_path,
+ server, collection, file, representation, site,
+ overwrite=False):
+ """
+ Uploads single file from 'source_path' to destination 'path'.
+        It creates all folders on the path if they do not exist.
+
+ Args:
+ source_path (string):
+ target_path (string): absolute path with or without name of a file
+ overwrite (boolean): replace existing file
+
+ arguments for saving progress:
+ server (SyncServer): server instance to call update_db on
+ collection (str): name of collection
+ file (dict): info about uploaded file (matches structure from db)
+ representation (dict): complete repre containing 'file'
+ site (str): site name
+
+ Returns:
+            (string) file_id of created/modified file,
+                raises FileExistsError or FileNotFoundError
+ """
+ if not os.path.isfile(source_path):
+ raise FileNotFoundError("Source file {} doesn't exist."
+ .format(source_path))
+
+ if self.file_path_exists(target_path):
+ if not overwrite:
+ raise ValueError("File {} exists, set overwrite".
+ format(target_path))
+
+ thread = threading.Thread(target=self._upload,
+ args=(source_path, target_path))
+ thread.start()
+ self._mark_progress(collection, file, representation, server,
+ site, source_path, target_path, "upload")
+
+ return os.path.basename(target_path)
+
+ def _upload(self, source_path, target_path):
+ print("copying {}->{}".format(source_path, target_path))
+ conn = self._get_conn()
+ conn.put(source_path, target_path)
+
+ def download_file(self, source_path, target_path,
+ server, collection, file, representation, site,
+ overwrite=False):
+ """
+ Downloads single file from 'source_path' (remote) to 'target_path'.
+        It creates all folders on the local_path if they do not exist.
+ By default existing file on 'target_path' will trigger an exception
+
+ Args:
+ source_path (string): absolute path on provider
+ target_path (string): absolute path with or without name of a file
+ overwrite (boolean): replace existing file
+
+ arguments for saving progress:
+ server (SyncServer): server instance to call update_db on
+ collection (str): name of collection
+ file (dict): info about uploaded file (matches structure from db)
+ representation (dict): complete repre containing 'file'
+ site (str): site name
+
+ Returns:
+            (string) file_id of created/modified file,
+                raises FileExistsError or FileNotFoundError
+ """
+ if not self.file_path_exists(source_path):
+ raise FileNotFoundError("Source file {} doesn't exist."
+ .format(source_path))
+
+ if os.path.isfile(target_path):
+ if not overwrite:
+ raise ValueError("File {} exists, set overwrite".
+ format(target_path))
+
+ thread = threading.Thread(target=self._download,
+ args=(source_path, target_path))
+ thread.start()
+ self._mark_progress(collection, file, representation, server,
+ site, source_path, target_path, "download")
+
+ return os.path.basename(target_path)
+
+ def _download(self, source_path, target_path):
+ print("downloading {}->{}".format(source_path, target_path))
+ conn = self._get_conn()
+ conn.get(source_path, target_path)
+
+ def delete_file(self, path):
+ """
+ Deletes file from 'path'. Expects path to specific file.
+
+ Args:
+ path: absolute path to particular file
+
+ Returns:
+ None
+ """
+ if not self.file_path_exists(path):
+ raise FileNotFoundError("File {} to be deleted doesn't exist."
+ .format(path))
+
+ self.conn.remove(path)
+
+ def list_folder(self, folder_path):
+ """
+ List all files and subfolders of particular path non-recursively.
+
+ Args:
+            folder_path (string): absolute path on provider
+ Returns:
+ (list)
+ """
+        return self.conn.listdir(folder_path)
+
+ def folder_path_exists(self, file_path):
+ """
+ Checks if path from 'file_path' exists. If so, return its
+ folder id.
+ Args:
+ file_path (string): path with / as a separator
+ Returns:
+ (string) folder id or False
+ """
+ if not file_path:
+ return False
+
+ return self.conn.isdir(file_path)
+
+ def file_path_exists(self, file_path):
+ """
+        Checks if 'file_path' exists on the remote SFTP site
+
+ Args:
+ file_path (string): separated by '/', from root, with file name
+ Returns:
+            (boolean) True if the file exists
+ """
+ if not file_path:
+ return False
+
+ return self.conn.isfile(file_path)
+
+ @classmethod
+ def get_presets(cls):
+ """
+ Get presets for this provider
+ Returns:
+ (dictionary) of configured sites
+ """
+ provider_presets = None
+ try:
+ provider_presets = (
+ get_system_settings()["modules"]
+ ["sync_server"]
+ ["providers"]
+ ["sftp"]
+ )
+ except KeyError:
+ log.info(("Sync Server: There are no presets for SFTP " +
+ "provider.").
+ format(str(provider_presets)))
+ return
+ return provider_presets
+
+ def _get_conn(self):
+ """
+ Returns fresh sftp connection.
+
+ It seems that connection cannot be cached into self.conn, at least
+ for get and put which run in separate threads.
+
+ Returns:
+ pysftp.Connection
+ """
+        if not pysftp:
+            raise ImportError("Python module 'pysftp' is not installed.")
+
+        # Disable host key verification
+        cnopts = pysftp.CnOpts()
+        cnopts.hostkeys = None
+
+ conn_params = {
+ 'host': self.sftp_host,
+ 'port': self.sftp_port,
+ 'username': self.sftp_user,
+ 'cnopts': cnopts
+ }
+ if self.sftp_pass and self.sftp_pass.strip():
+ conn_params['password'] = self.sftp_pass
+ if self.sftp_key: # expects .pem format, not .ppk!
+ conn_params['private_key'] = \
+ self.sftp_key[platform.system().lower()]
+ if self.sftp_key_pass:
+ conn_params['private_key_pass'] = self.sftp_key_pass
+
+ return pysftp.Connection(**conn_params)
+
+ def _mark_progress(self, collection, file, representation, server, site,
+ source_path, target_path, direction):
+ """
+ Updates progress field in DB by values 0-1.
+
+ Compares file sizes of source and target.
+ """
+ if direction == "upload":
+ source_file_size = os.path.getsize(source_path)
+ else:
+ source_file_size = self.conn.stat(source_path).st_size
+
+ target_file_size = 0
+ last_tick = status_val = None
+ while source_file_size != target_file_size:
+ if not last_tick or \
+ time.time() - last_tick >= server.LOG_PROGRESS_SEC:
+ status_val = target_file_size / source_file_size
+ last_tick = time.time()
+ log.debug(direction + "ed %d%%." % int(status_val * 100))
+ server.update_db(collection=collection,
+ new_file_id=None,
+ file=file,
+ representation=representation,
+ site=site,
+ progress=status_val
+ )
+ try:
+ if direction == "upload":
+ target_file_size = self.conn.stat(target_path).st_size
+ else:
+ target_file_size = os.path.getsize(target_path)
+ except FileNotFoundError:
+ pass
+ time.sleep(0.5)
diff --git a/openpype/modules/default_modules/sync_server/sync_server.py b/openpype/modules/default_modules/sync_server/sync_server.py
index 638a4a367f..2227ec9366 100644
--- a/openpype/modules/default_modules/sync_server/sync_server.py
+++ b/openpype/modules/default_modules/sync_server/sync_server.py
@@ -221,6 +221,7 @@ def _get_configured_sites_from_setting(module, project_name, project_setting):
return configured_sites
+
class SyncServerThread(threading.Thread):
"""
Separate thread running synchronization server with asyncio loop.
diff --git a/openpype/modules/default_modules/sync_server/sync_server_module.py b/openpype/modules/default_modules/sync_server/sync_server_module.py
index e65a410551..f2e9237542 100644
--- a/openpype/modules/default_modules/sync_server/sync_server_module.py
+++ b/openpype/modules/default_modules/sync_server/sync_server_module.py
@@ -16,14 +16,13 @@ from openpype.api import (
get_local_site_id)
from openpype.lib import PypeLogger
from openpype.settings.lib import (
- get_default_project_settings,
get_default_anatomy_settings,
get_anatomy_settings)
from .providers.local_drive import LocalDriveHandler
from .providers import lib
-from .utils import time_function, SyncStatus, EditableScopes
+from .utils import time_function, SyncStatus
log = PypeLogger().get_logger("SyncServer")
@@ -399,204 +398,251 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return remote_site
- def get_local_settings_schema(self):
- """Wrapper for Local settings - all projects incl. Default"""
- return self.get_configurable_items(EditableScopes.LOCAL)
-
- def get_configurable_items(self, scope=None):
+ def get_local_normalized_site(self, site_name):
"""
- Returns list of sites that could be configurable for all projects.
+        Return 'site_name' or 'local' if 'site_name' is the local site id.
- Could be filtered by 'scope' argument (list)
-
- Args:
- scope (list of utils.EditableScope)
-
- Returns:
- (dict of list of dict)
- {
- siteA : [
- {
- key:"root", label:"root",
- "value":"{'work': 'c:/projects'}",
- "type": "dict",
- "children":[
- { "key": "work",
- "type": "text",
- "value": "c:/projects"}
- ]
- },
- {
- key:"credentials_url", label:"Credentials url",
- "value":"'c:/projects/cred.json'", "type": "text",
- "namespace": "{project_setting}/global/sync_server/
- sites"
- }
- ]
- }
+        In some places Settings or Local Settings require 'local' instead
+        of the real site name.
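+
+        A minimal usage sketch (the instance name and the "studio" site are
+        placeholders; assumes LOCAL_SITE is the string "local")::
+
+            sync_server.get_local_normalized_site(get_local_site_id())
+            # -> "local"
+            sync_server.get_local_normalized_site("studio")
+            # -> "studio"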
"""
- editable = {}
- applicable_projects = list(self.connection.projects())
- applicable_projects.append(None)
- for project in applicable_projects:
- project_name = None
- if project:
- project_name = project["name"]
+ if site_name == get_local_site_id():
+ site_name = self.LOCAL_SITE
- items = self.get_configurable_items_for_project(project_name,
- scope)
- editable.update(items)
+ return site_name
- return editable
+ # Methods for Settings UI to draw appropriate forms
+ @classmethod
+ def get_system_settings_schema(cls):
+ """ Gets system level schema of configurable items
- def get_local_settings_schema_for_project(self, project_name):
- """Wrapper for Local settings - for specific 'project_name'"""
- return self.get_configurable_items_for_project(project_name,
- EditableScopes.LOCAL)
-
- def get_configurable_items_for_project(self, project_name=None,
- scope=None):
+        Used for the Settings UI to provide forms.
"""
- Returns list of items that could be configurable for specific
- 'project_name'
+ ret_dict = {}
+ for provider_code in lib.factory.providers:
+            ret_dict[provider_code] = (
+                lib.factory.get_provider_cls(provider_code)
+                .get_system_settings_schema()
+            )
- Args:
- project_name (str) - None > default project,
- scope (list of utils.EditableScope)
- (optional, None is all scopes, default is LOCAL)
+ return ret_dict
- Returns:
- (dict of list of dict)
- {
- siteA : [
- {
- key:"root", label:"root",
- "type": "dict",
- "children":[
- { "key": "work",
- "type": "text",
- "value": "c:/projects"}
- ]
- },
- {
- key:"credentials_url", label:"Credentials url",
- "value":"'c:/projects/cred.json'", "type": "text",
- "namespace": "{project_setting}/global/sync_server/
- sites"
- }
- ]
- }
+ @classmethod
+ def get_project_settings_schema(cls):
+ """ Gets project level schema of configurable items.
+
+        It is not using Settings! Used for the Settings UI to provide forms.
"""
- allowed_sites = set()
- sites = self.get_all_site_configs(project_name)
- if project_name:
- # Local Settings can select only from allowed sites for project
- allowed_sites.update(set(self.get_active_sites(project_name)))
- allowed_sites.update(set(self.get_remote_sites(project_name)))
+ ret_dict = {}
+ for provider_code in lib.factory.providers:
+            ret_dict[provider_code] = (
+                lib.factory.get_provider_cls(provider_code)
+                .get_project_settings_schema()
+            )
- editable = {}
- for site_name in sites.keys():
- if allowed_sites and site_name not in allowed_sites:
- continue
+ return ret_dict
- items = self.get_configurable_items_for_site(project_name,
- site_name,
- scope)
- # Local Settings need 'local' instead of real value
- site_name = site_name.replace(get_local_site_id(), 'local')
- editable[site_name] = items
+ @classmethod
+ def get_local_settings_schema(cls):
+ """ Gets local level schema of configurable items.
- return editable
-
- def get_local_settings_schema_for_site(self, project_name, site_name):
- """Wrapper for Local settings - for particular 'site_name and proj."""
- return self.get_configurable_items_for_site(project_name,
- site_name,
- EditableScopes.LOCAL)
-
- def get_configurable_items_for_site(self, project_name=None,
- site_name=None,
- scope=None):
+        It is not using Settings! Used for the Settings UI to provide forms.
"""
- Returns list of items that could be configurable.
+ ret_dict = {}
+ for provider_code in lib.factory.providers:
+            ret_dict[provider_code] = (
+                lib.factory.get_provider_cls(provider_code)
+                .get_local_settings_schema()
+            )
- Args:
- project_name (str) - None > default project
- site_name (str)
- scope (list of utils.EditableScope)
- (optional, None is all scopes)
+ return ret_dict
- Returns:
- (list)
- [
- {
- key:"root", label:"root", type:"dict",
- "children":[
- { "key": "work",
- "type": "text",
- "value": "c:/projects"}
- ]
- }, ...
- ]
- """
- provider_name = self.get_provider_for_site(site=site_name)
- items = lib.factory.get_provider_configurable_items(provider_name)
-
- if project_name:
- sync_s = self.get_sync_project_setting(project_name,
- exclude_locals=True,
- cached=False)
- else:
- sync_s = get_default_project_settings(exclude_locals=True)
- sync_s = sync_s["global"]["sync_server"]
- sync_s["sites"].update(
- self._get_default_site_configs(self.enabled))
-
- editable = []
- if type(scope) is not list:
- scope = [scope]
- scope = set(scope)
- for key, properties in items.items():
- if scope is None or scope.intersection(set(properties["scope"])):
- val = sync_s.get("sites", {}).get(site_name, {}).get(key)
-
- item = {
- "key": key,
- "label": properties["label"],
- "type": properties["type"]
- }
-
- if properties.get("namespace"):
- item["namespace"] = properties.get("namespace")
- if "platform" in item["namespace"]:
- try:
- if val:
- val = val[platform.system().lower()]
- except KeyError:
- st = "{}'s field value {} should be".format(key, val) # noqa: E501
- log.error(st + " multiplatform dict")
-
- item["namespace"] = item["namespace"].replace('{site}',
- site_name)
- children = []
- if properties["type"] == "dict":
- if val:
- for val_key, val_val in val.items():
- child = {
- "type": "text",
- "key": val_key,
- "value": val_val
- }
- children.append(child)
-
- if properties["type"] == "dict":
- item["children"] = children
- else:
- item["value"] = val
-
- editable.append(item)
-
- return editable
+ # Needs to be refactored after Settings are updated
+        # # Methods for Settings to get appropriate values to fill forms
+ # def get_configurable_items(self, scope=None):
+ # """
+ # Returns list of sites that could be configurable for all projects
+ #
+ # Could be filtered by 'scope' argument (list)
+ #
+ # Args:
+ # scope (list of utils.EditableScope)
+ #
+ # Returns:
+ # (dict of list of dict)
+ # {
+ # siteA : [
+ # {
+ # key:"root", label:"root",
+ # "value":"{'work': 'c:/projects'}",
+ # "type": "dict",
+ # "children":[
+ # { "key": "work",
+ # "type": "text",
+ # "value": "c:/projects"}
+ # ]
+ # },
+ # {
+ # key:"credentials_url", label:"Credentials url",
+ # "value":"'c:/projects/cred.json'", "type": "text", # noqa: E501
+ # "namespace": "{project_setting}/global/sync_server/ # noqa: E501
+ # sites"
+ # }
+ # ]
+ # }
+ # """
+ # editable = {}
+ # applicable_projects = list(self.connection.projects())
+ # applicable_projects.append(None)
+ # for project in applicable_projects:
+ # project_name = None
+ # if project:
+ # project_name = project["name"]
+ #
+ # items = self.get_configurable_items_for_project(project_name,
+ # scope)
+ # editable.update(items)
+ #
+ # return editable
+ #
+ # def get_local_settings_schema_for_project(self, project_name):
+ # """Wrapper for Local settings - for specific 'project_name'"""
+ # return self.get_configurable_items_for_project(project_name,
+ # EditableScopes.LOCAL)
+ #
+ # def get_configurable_items_for_project(self, project_name=None,
+ # scope=None):
+ # """
+ # Returns list of items that could be configurable for specific
+ # 'project_name'
+ #
+ # Args:
+ # project_name (str) - None > default project,
+ # scope (list of utils.EditableScope)
+ # (optional, None is all scopes, default is LOCAL)
+ #
+ # Returns:
+ # (dict of list of dict)
+ # {
+ # siteA : [
+ # {
+ # key:"root", label:"root",
+ # "type": "dict",
+ # "children":[
+ # { "key": "work",
+ # "type": "text",
+ # "value": "c:/projects"}
+ # ]
+ # },
+ # {
+ # key:"credentials_url", label:"Credentials url",
+ # "value":"'c:/projects/cred.json'", "type": "text",
+ # "namespace": "{project_setting}/global/sync_server/
+ # sites"
+ # }
+ # ]
+ # }
+ # """
+ # allowed_sites = set()
+ # sites = self.get_all_site_configs(project_name)
+ # if project_name:
+ # # Local Settings can select only from allowed sites for project
+ # allowed_sites.update(set(self.get_active_sites(project_name)))
+ # allowed_sites.update(set(self.get_remote_sites(project_name)))
+ #
+ # editable = {}
+ # for site_name in sites.keys():
+ # if allowed_sites and site_name not in allowed_sites:
+ # continue
+ #
+ # items = self.get_configurable_items_for_site(project_name,
+ # site_name,
+ # scope)
+ # # Local Settings need 'local' instead of real value
+ # site_name = site_name.replace(get_local_site_id(), 'local')
+ # editable[site_name] = items
+ #
+ # return editable
+ #
+ # def get_configurable_items_for_site(self, project_name=None,
+ # site_name=None,
+ # scope=None):
+ # """
+ # Returns list of items that could be configurable.
+ #
+ # Args:
+ # project_name (str) - None > default project
+ # site_name (str)
+ # scope (list of utils.EditableScope)
+ # (optional, None is all scopes)
+ #
+ # Returns:
+ # (list)
+ # [
+ # {
+ # key:"root", label:"root", type:"dict",
+ # "children":[
+ # { "key": "work",
+ # "type": "text",
+ # "value": "c:/projects"}
+ # ]
+ # }, ...
+ # ]
+ # """
+ # provider_name = self.get_provider_for_site(site=site_name)
+ # items = lib.factory.get_provider_configurable_items(provider_name)
+ #
+ # if project_name:
+ # sync_s = self.get_sync_project_setting(project_name,
+ # exclude_locals=True,
+ # cached=False)
+ # else:
+ # sync_s = get_default_project_settings(exclude_locals=True)
+ # sync_s = sync_s["global"]["sync_server"]
+ # sync_s["sites"].update(
+ # self._get_default_site_configs(self.enabled))
+ #
+ # editable = []
+ # if type(scope) is not list:
+ # scope = [scope]
+ # scope = set(scope)
+ # for key, properties in items.items():
+ # if scope is None or scope.intersection(set(properties["scope"])):
+ # val = sync_s.get("sites", {}).get(site_name, {}).get(key)
+ #
+ # item = {
+ # "key": key,
+ # "label": properties["label"],
+ # "type": properties["type"]
+ # }
+ #
+ # if properties.get("namespace"):
+ # item["namespace"] = properties.get("namespace")
+ # if "platform" in item["namespace"]:
+ # try:
+ # if val:
+ # val = val[platform.system().lower()]
+ # except KeyError:
+ # st = "{}'s field value {} should be".format(key, val) # noqa: E501
+ # log.error(st + " multiplatform dict")
+ #
+ # item["namespace"] = item["namespace"].replace('{site}',
+ # site_name)
+ # children = []
+ # if properties["type"] == "dict":
+ # if val:
+ # for val_key, val_val in val.items():
+ # child = {
+ # "type": "text",
+ # "key": val_key,
+ # "value": val_val
+ # }
+ # children.append(child)
+ #
+ # if properties["type"] == "dict":
+ # item["children"] = children
+ # else:
+ # item["value"] = val
+ #
+ # editable.append(item)
+ #
+ # return editable
def reset_timer(self):
"""
@@ -611,7 +657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
enabled_projects = []
if self.enabled:
- for project in self.connection.projects():
+ for project in self.connection.projects(projection={"name": 1}):
project_name = project["name"]
project_settings = self.get_sync_project_setting(project_name)
if project_settings and project_settings.get("enabled"):
@@ -646,9 +692,6 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
return sites
- def connect_with_modules(self, *_a, **kw):
- return
-
def tray_init(self):
"""
Actual initialization of Sync Server.
@@ -781,17 +824,22 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
def _prepare_sync_project_settings(self, exclude_locals):
sync_project_settings = {}
system_sites = self.get_all_site_configs()
- for collection in self.connection.database.collection_names(False):
+ project_docs = self.connection.projects(
+ projection={"name": 1},
+ only_active=True
+ )
+ for project_doc in project_docs:
+ project_name = project_doc["name"]
sites = copy.deepcopy(system_sites) # get all configured sites
proj_settings = self._parse_sync_settings_from_settings(
- get_project_settings(collection,
+ get_project_settings(project_name,
exclude_locals=exclude_locals))
sites.update(self._get_default_site_configs(
- proj_settings["enabled"], collection))
+ proj_settings["enabled"], project_name))
sites.update(proj_settings['sites'])
proj_settings["sites"] = sites
- sync_project_settings[collection] = proj_settings
+ sync_project_settings[project_name] = proj_settings
if not sync_project_settings:
log.info("No enabled and configured projects for sync.")
return sync_project_settings
diff --git a/openpype/modules/default_modules/sync_server/tray/app.py b/openpype/modules/default_modules/sync_server/tray/app.py
index 106076d81c..fc8558bdbc 100644
--- a/openpype/modules/default_modules/sync_server/tray/app.py
+++ b/openpype/modules/default_modules/sync_server/tray/app.py
@@ -26,7 +26,7 @@ class SyncServerWindow(QtWidgets.QDialog):
self.setFocusPolicy(QtCore.Qt.StrongFocus)
self.setStyleSheet(style.load_stylesheet())
- self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
+ self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
self.resize(1450, 700)
self.timer = QtCore.QTimer()
@@ -77,19 +77,36 @@ class SyncServerWindow(QtWidgets.QDialog):
self.setWindowTitle("Sync Queue")
self.projects.project_changed.connect(
- lambda: repres.table_view.model().set_project(
- self.projects.current_project))
+ self._on_project_change
+ )
self.pause_btn.clicked.connect(self._pause)
self.pause_btn.setAutoDefault(False)
self.pause_btn.setDefault(False)
repres.message_generated.connect(self._update_message)
+ self.projects.message_generated.connect(self._update_message)
self.representationWidget = repres
+ def _on_project_change(self):
+ if self.projects.current_project is None:
+ return
+
+ self.representationWidget.table_view.model().set_project(
+ self.projects.current_project
+ )
+
+ project_name = self.projects.current_project
+ if not self.sync_server.get_sync_project_setting(project_name):
+ self.projects.message_generated.emit(
+ "Project {} not active anymore".format(project_name))
+ self.projects.refresh()
+ return
+
def showEvent(self, event):
self.representationWidget.model.set_project(
self.projects.current_project)
+ self.projects.refresh()
self._set_running(True)
super().showEvent(event)
diff --git a/openpype/modules/default_modules/sync_server/tray/models.py b/openpype/modules/default_modules/sync_server/tray/models.py
index 8c86d3b98f..5642c5b34a 100644
--- a/openpype/modules/default_modules/sync_server/tray/models.py
+++ b/openpype/modules/default_modules/sync_server/tray/models.py
@@ -5,7 +5,7 @@ from bson.objectid import ObjectId
from Qt import QtCore
from Qt.QtCore import Qt
-from avalon.tools.delegates import pretty_timestamp
+from openpype.tools.utils.delegates import pretty_timestamp
from avalon.vendor import qtawesome
from openpype.lib import PypeLogger
@@ -17,25 +17,6 @@ from . import lib
log = PypeLogger().get_logger("SyncServer")
-class ProjectModel(QtCore.QAbstractListModel):
- def __init__(self, *args, projects=None, **kwargs):
- super(ProjectModel, self).__init__(*args, **kwargs)
- self.projects = projects or []
-
- def data(self, index, role):
- if role == Qt.DisplayRole:
- # See below for the data structure.
- status, text = self.projects[index.row()]
- # Return the todo text only.
- return text
-
- def rowCount(self, _index):
- return len(self.todos)
-
- def columnCount(self, _index):
- return len(self._header)
-
-
class _SyncRepresentationModel(QtCore.QAbstractTableModel):
COLUMN_LABELS = []
@@ -320,6 +301,10 @@ class _SyncRepresentationModel(QtCore.QAbstractTableModel):
"""
self._project = project
self.sync_server.set_sync_project_settings()
+ # project might have been deactivated in the meantime
+ if not self.sync_server.get_sync_project_setting(project):
+ return
+
self.active_site = self.sync_server.get_active_site(self.project)
self.remote_site = self.sync_server.get_remote_site(self.project)
self.refresh()
diff --git a/openpype/modules/default_modules/sync_server/tray/widgets.py b/openpype/modules/default_modules/sync_server/tray/widgets.py
index c9160733a0..45537c1c2e 100644
--- a/openpype/modules/default_modules/sync_server/tray/widgets.py
+++ b/openpype/modules/default_modules/sync_server/tray/widgets.py
@@ -6,15 +6,12 @@ from functools import partial
from Qt import QtWidgets, QtCore, QtGui
from Qt.QtCore import Qt
-from openpype.tools.settings import (
- ProjectListWidget,
- style
-)
+from openpype.tools.settings import style
from openpype.api import get_local_site_id
from openpype.lib import PypeLogger
-from avalon.tools.delegates import pretty_timestamp
+from openpype.tools.utils.delegates import pretty_timestamp
from avalon.vendor import qtawesome
from .models import (
@@ -28,28 +25,58 @@ from . import delegates
log = PypeLogger().get_logger("SyncServer")
-class SyncProjectListWidget(ProjectListWidget):
+class SyncProjectListWidget(QtWidgets.QWidget):
"""
Lists all projects that are synchronized to choose from
"""
+ project_changed = QtCore.Signal()
+ message_generated = QtCore.Signal(str)
def __init__(self, sync_server, parent):
super(SyncProjectListWidget, self).__init__(parent)
+ self.setObjectName("ProjectListWidget")
+
+ self._parent = parent
+
+ label_widget = QtWidgets.QLabel("Projects", self)
+ project_list = QtWidgets.QListView(self)
+ project_model = QtGui.QStandardItemModel()
+ project_list.setModel(project_model)
+ project_list.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+
+ # Do not allow editing
+ project_list.setEditTriggers(
+ QtWidgets.QAbstractItemView.EditTrigger.NoEditTriggers
+ )
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.setSpacing(3)
+ layout.addWidget(label_widget, 0)
+ layout.addWidget(project_list, 1)
+
+ project_list.customContextMenuRequested.connect(self._on_context_menu)
+ project_list.selectionModel().currentChanged.connect(
+ self._on_index_change
+ )
+
+ self.project_model = project_model
+ self.project_list = project_list
self.sync_server = sync_server
- self.project_list.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
- self.project_list.customContextMenuRequested.connect(
- self._on_context_menu)
+ self.current_project = None
self.project_name = None
self.local_site = None
+ self.remote_site = None
self.icons = {}
- self.layout().setContentsMargins(0, 0, 0, 0)
+ def _on_index_change(self, new_idx, _old_idx):
+ project_name = new_idx.data(QtCore.Qt.DisplayRole)
- def validate_context_change(self):
- return True
+ self.current_project = project_name
+ self.project_changed.emit()
def refresh(self):
- model = self.project_list.model()
+ model = self.project_model
model.clear()
project_name = None
@@ -70,11 +97,15 @@ class SyncProjectListWidget(ProjectListWidget):
QtCore.Qt.DisplayRole
)
if not self.current_project:
- self.current_project = self.project_list.model().item(0). \
- data(QtCore.Qt.DisplayRole)
+ self.current_project = model.item(0).data(QtCore.Qt.DisplayRole)
if project_name:
self.local_site = self.sync_server.get_active_site(project_name)
+ self.remote_site = self.sync_server.get_remote_site(project_name)
+
+ def _can_edit(self):
+ """Returns true if some site is user local site, eg. could edit"""
+ return get_local_site_id() in (self.local_site, self.remote_site)
def _get_icon(self, status):
if not self.icons.get(status):
@@ -98,9 +129,7 @@ class SyncProjectListWidget(ProjectListWidget):
menu = QtWidgets.QMenu(self)
actions_mapping = {}
- can_edit = self.model.can_edit
-
- if can_edit:
+ if self._can_edit():
if self.sync_server.is_project_paused(self.project_name):
action = QtWidgets.QAction("Unpause")
actions_mapping[action] = self._unpause
diff --git a/openpype/modules/default_modules/sync_server/utils.py b/openpype/modules/default_modules/sync_server/utils.py
index d4fc29ff8a..85e4e03f77 100644
--- a/openpype/modules/default_modules/sync_server/utils.py
+++ b/openpype/modules/default_modules/sync_server/utils.py
@@ -29,7 +29,6 @@ def time_function(method):
kw['log_time'][name] = int((te - ts) * 1000)
else:
log.debug('%r %2.2f ms' % (method.__name__, (te - ts) * 1000))
- print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000))
return result
return timed
diff --git a/openpype/modules/default_modules/timers_manager/idle_threads.py b/openpype/modules/default_modules/timers_manager/idle_threads.py
new file mode 100644
index 0000000000..9ec27e659b
--- /dev/null
+++ b/openpype/modules/default_modules/timers_manager/idle_threads.py
@@ -0,0 +1,160 @@
+import time
+from Qt import QtCore
+from pynput import mouse, keyboard
+
+from openpype.lib import PypeLogger
+
+
+class IdleItem:
+ """Python object holds information if state of idle changed.
+
+ This item is used to be independent from Qt objects.
+ """
+ def __init__(self):
+ self.changed = False
+
+ def reset(self):
+ self.changed = False
+
+ def set_changed(self, changed=True):
+ self.changed = changed
+
+
+class IdleManager(QtCore.QThread):
+ """ Measure user's idle time in seconds.
+ Idle time resets on keyboard/mouse input.
+ Is able to emit signals at specific time idle.
+ """
+ time_signals = {}
+ idle_time = 0
+ signal_reset_timer = QtCore.Signal()
+
+ def __init__(self):
+ super(IdleManager, self).__init__()
+ self.log = PypeLogger.get_logger(self.__class__.__name__)
+ self.signal_reset_timer.connect(self._reset_time)
+
+ self.idle_item = IdleItem()
+
+ self._is_running = False
+ self._mouse_thread = None
+ self._keyboard_thread = None
+
+ def add_time_signal(self, emit_time, signal):
+ """ If any module want to use IdleManager, need to use add_time_signal
+
+ Args:
+ emit_time(int): Time when signal will be emitted.
+ signal(QtCore.Signal): Signal that will be emitted
+ (without objects).
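+
+        Example (a minimal sketch; ``signal_handler`` stands for any QObject
+        exposing a no-argument signal, e.g. the tray SignalHandler)::
+
+            idle_manager = IdleManager()
+            idle_manager.add_time_signal(
+                300, signal_handler.signal_show_message
+            )
+            idle_manager.start()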
+ """
+ if emit_time not in self.time_signals:
+ self.time_signals[emit_time] = []
+ self.time_signals[emit_time].append(signal)
+
+ @property
+ def is_running(self):
+ return self._is_running
+
+ def _reset_time(self):
+ self.idle_time = 0
+
+ def stop(self):
+ self._is_running = False
+
+ def _on_mouse_destroy(self):
+ self._mouse_thread = None
+
+ def _on_keyboard_destroy(self):
+ self._keyboard_thread = None
+
+ def run(self):
+ self.log.info('IdleManager has started')
+ self._is_running = True
+
+ thread_mouse = MouseThread(self.idle_item)
+ thread_keyboard = KeyboardThread(self.idle_item)
+
+ thread_mouse.destroyed.connect(self._on_mouse_destroy)
+ thread_keyboard.destroyed.connect(self._on_keyboard_destroy)
+
+ self._mouse_thread = thread_mouse
+ self._keyboard_thread = thread_keyboard
+
+ thread_mouse.start()
+ thread_keyboard.start()
+
+        # Main loop: check each second whether the idle item changed state
+ while self._is_running:
+ if self.idle_item.changed:
+ self.idle_item.reset()
+ self.signal_reset_timer.emit()
+ else:
+ self.idle_time += 1
+
+ if self.idle_time in self.time_signals:
+ for signal in self.time_signals[self.idle_time]:
+ signal.emit()
+ time.sleep(1)
+
+ self._post_run()
+ self.log.info('IdleManager has stopped')
+
+ def _post_run(self):
+ # Stop threads if still exist
+ if self._mouse_thread is not None:
+ self._mouse_thread.signal_stop.emit()
+ self._mouse_thread.terminate()
+ self._mouse_thread.wait()
+
+ if self._keyboard_thread is not None:
+ self._keyboard_thread.signal_stop.emit()
+ self._keyboard_thread.terminate()
+ self._keyboard_thread.wait()
+
+
+class MouseThread(QtCore.QThread):
+ """Listens user's mouse movement."""
+ signal_stop = QtCore.Signal()
+
+ def __init__(self, idle_item):
+ super(MouseThread, self).__init__()
+ self.signal_stop.connect(self.stop)
+ self.m_listener = None
+ self.idle_item = idle_item
+
+ def stop(self):
+ if self.m_listener is not None:
+ self.m_listener.stop()
+
+ def on_move(self, *args, **kwargs):
+ self.idle_item.set_changed()
+
+ def run(self):
+ self.m_listener = mouse.Listener(on_move=self.on_move)
+ self.m_listener.start()
+
+
+class KeyboardThread(QtCore.QThread):
+ """Listens user's keyboard input
+ """
+ signal_stop = QtCore.Signal()
+
+ def __init__(self, idle_item):
+ super(KeyboardThread, self).__init__()
+ self.signal_stop.connect(self.stop)
+ self.k_listener = None
+ self.idle_item = idle_item
+
+ def stop(self):
+ if self.k_listener is not None:
+ listener = self.k_listener
+ self.k_listener = None
+ listener.stop()
+
+ def on_press(self, *args, **kwargs):
+ self.idle_item.set_changed()
+
+ def run(self):
+ self.k_listener = keyboard.Listener(on_press=self.on_press)
+ self.k_listener.start()
diff --git a/openpype/modules/default_modules/timers_manager/interfaces.py b/openpype/modules/default_modules/timers_manager/interfaces.py
deleted file mode 100644
index 179013cffe..0000000000
--- a/openpype/modules/default_modules/timers_manager/interfaces.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from abc import abstractmethod
-from openpype.modules import OpenPypeInterface
-
-
-class ITimersManager(OpenPypeInterface):
- timer_manager_module = None
-
- @abstractmethod
- def stop_timer(self):
- pass
-
- @abstractmethod
- def start_timer(self, data):
- pass
-
- def timer_started(self, data):
- if not self.timer_manager_module:
- return
-
- self.timer_manager_module.timer_started(self.id, data)
-
- def timer_stopped(self):
- if not self.timer_manager_module:
- return
-
- self.timer_manager_module.timer_stopped(self.id)
diff --git a/openpype/modules/default_modules/timers_manager/rest_api.py b/openpype/modules/default_modules/timers_manager/rest_api.py
index ac8d8b7b74..19b72d688b 100644
--- a/openpype/modules/default_modules/timers_manager/rest_api.py
+++ b/openpype/modules/default_modules/timers_manager/rest_api.py
@@ -1,3 +1,5 @@
+import json
+
from aiohttp.web_response import Response
from openpype.api import Logger
@@ -28,6 +30,11 @@ class TimersManagerModuleRestApi:
self.prefix + "/stop_timer",
self.stop_timer
)
+ self.server_manager.add_route(
+ "GET",
+ self.prefix + "/get_task_time",
+ self.get_task_time
+ )
async def start_timer(self, request):
data = await request.json()
@@ -48,3 +55,20 @@ class TimersManagerModuleRestApi:
async def stop_timer(self, request):
self.module.stop_timers()
return Response(status=200)
+
+ async def get_task_time(self, request):
+ data = await request.json()
+ try:
+ project_name = data['project_name']
+ asset_name = data['asset_name']
+ task_name = data['task_name']
+ except KeyError:
+ message = (
+ "Payload must contain fields 'project_name, 'asset_name',"
+ " 'task_name'"
+ )
+ log.warning(message)
+ return Response(text=message, status=404)
+
+ time = self.module.get_task_time(project_name, asset_name, task_name)
+ return Response(text=json.dumps(time))
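+
+    # A minimal client-side sketch (assumptions: the tray webserver is
+    # running and this route prefix resolves to "/timers_manager"; adjust
+    # the URL and payload to the real deployment):
+    #
+    #     import os
+    #     import requests
+    #
+    #     url = "{}/timers_manager/get_task_time".format(
+    #         os.environ["OPENPYPE_WEBSERVER_URL"])
+    #     response = requests.get(url, json={
+    #         "project_name": "MyProject",
+    #         "asset_name": "characterA",
+    #         "task_name": "modeling"
+    #     })
+    #     print(response.json())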
diff --git a/openpype/modules/default_modules/timers_manager/timers_manager.py b/openpype/modules/default_modules/timers_manager/timers_manager.py
index 80f448095f..1199db0611 100644
--- a/openpype/modules/default_modules/timers_manager/timers_manager.py
+++ b/openpype/modules/default_modules/timers_manager/timers_manager.py
@@ -1,37 +1,107 @@
import os
-import collections
+import platform
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITimersManager,
- ITrayService,
- IIdleManager,
- IWebServerRoutes
+ ITrayService
)
from avalon.api import AvalonMongoDB
-class TimersManager(
- OpenPypeModule, ITrayService, IIdleManager, IWebServerRoutes
-):
+class ExampleTimersManagerConnector:
+ """Timers manager can handle timers of multiple modules/addons.
+
+    Module must have an object under the `timers_manager_connector` attribute
+    with a few methods. This is an example class of such an object that could
+    be stored on a module.
+
+ Required methods are 'stop_timer' and 'start_timer'.
+
+ # TODO pass asset document instead of `hierarchy`
+ Example of `data` that are passed during changing timer:
+ ```
+ data = {
+ "project_name": project_name,
+ "task_name": task_name,
+ "task_type": task_type,
+ "hierarchy": hierarchy
+ }
+ ```
+ """
+    # Not required by TimersManager (used only by this example)
+ def __init__(self, module):
+        # Store timers manager module to be able to call its methods if needed
+ self._timers_manager_module = None
+
+        # Store the module which wants to use timers manager
+ self._module = module
+
+ # Required
+ def stop_timer(self):
+ """Called by timers manager when module should stop timer."""
+ self._module.stop_timer()
+
+ # Required
+ def start_timer(self, data):
+ """Method called by timers manager when should start timer."""
+ self._module.start_timer(data)
+
+ # Optional
+ def register_timers_manager(self, timer_manager_module):
+ """Method called by timers manager where it's object is passed.
+
+ This is moment when timers manager module can be store to be able
+ call it's callbacks (e.g. timer started).
+ """
+ self._timers_manager_module = timer_manager_module
+
+ # Custom implementation
+ def timer_started(self, data):
+ """This is example of possibility to trigger callbacks on manager."""
+ if self._timers_manager_module is not None:
+ self._timers_manager_module.timer_started(self._module.id, data)
+
+ # Custom implementation
+ def timer_stopped(self):
+ if self._timers_manager_module is not None:
+ self._timers_manager_module.timer_stopped(self._module.id)
+
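+# A minimal wiring sketch (the module below is an assumption, not part of
+# this codebase): an OpenPype module exposes the connector so TimersManager
+# can find it in 'connect_with_modules'.
+#
+#     class MyTimedModule(OpenPypeModule):
+#         name = "my_timed_module"
+#
+#         def initialize(self, settings):
+#             self.enabled = True
+#             self.timers_manager_connector = (
+#                 ExampleTimersManagerConnector(self)
+#             )
+#
+#         def start_timer(self, data):
+#             pass
+#
+#         def stop_timer(self):
+#             pass
+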
+
+class TimersManager(OpenPypeModule, ITrayService):
""" Handles about Timers.
Should be able to start/stop all timers at once.
- If IdleManager is imported then is able to handle about stop timers
- when user idles for a long time (set in presets).
+
+    To use this, a module has to have an attribute named
+    `timers_manager_connector` which has two methods, 'stop_timer'
+    and 'start_timer'. Optionally it may have `register_timers_manager`,
+    where the TimersManager module object is passed so its callbacks
+    can be called.
+
+ See `ExampleTimersManagerConnector`.
"""
name = "timers_manager"
label = "Timers Service"
+ _required_methods = (
+ "stop_timer",
+ "start_timer"
+ )
+
def initialize(self, modules_settings):
timers_settings = modules_settings[self.name]
self.enabled = timers_settings["enabled"]
- auto_stop = timers_settings["auto_stop"]
+
# When timer will stop if idle manager is running (minutes)
full_time = int(timers_settings["full_time"] * 60)
# How many minutes before the timer is stopped will popup the message
message_time = int(timers_settings["message_time"] * 60)
+ auto_stop = timers_settings["auto_stop"]
+        # Turn off auto stop on macOS because pynput requires root permissions
+ if platform.system().lower() == "darwin" or full_time <= 0:
+ auto_stop = False
+
self.auto_stop = auto_stop
self.time_show_message = full_time - message_time
self.time_stop_timer = full_time
@@ -40,30 +110,46 @@ class TimersManager(
self.last_task = None
# Tray attributes
- self.signal_handler = None
- self.widget_user_idle = None
- self.signal_handler = None
+ self._signal_handler = None
+ self._widget_user_idle = None
+ self._idle_manager = None
- self.modules = []
+ self._connectors_by_module_id = {}
+ self._modules_by_id = {}
def tray_init(self):
+ if not self.auto_stop:
+ return
+
+ from .idle_threads import IdleManager
from .widget_user_idle import WidgetUserIdle, SignalHandler
- self.widget_user_idle = WidgetUserIdle(self)
- self.signal_handler = SignalHandler(self)
+
+ signal_handler = SignalHandler(self)
+ idle_manager = IdleManager()
+ widget_user_idle = WidgetUserIdle(self)
+ widget_user_idle.set_countdown_start(self.time_show_message)
+
+ idle_manager.signal_reset_timer.connect(
+ widget_user_idle.reset_countdown
+ )
+ idle_manager.add_time_signal(
+ self.time_show_message, signal_handler.signal_show_message
+ )
+ idle_manager.add_time_signal(
+ self.time_stop_timer, signal_handler.signal_stop_timers
+ )
+
+ self._signal_handler = signal_handler
+ self._widget_user_idle = widget_user_idle
+ self._idle_manager = idle_manager
def tray_start(self, *_a, **_kw):
- return
+ if self._idle_manager:
+ self._idle_manager.start()
def tray_exit(self):
- """Nothing special for TimersManager."""
- return
-
- def webserver_initialization(self, server_manager):
- """Implementation of IWebServerRoutes interface."""
- if self.tray_initialized:
- from .rest_api import TimersManagerModuleRestApi
- self.rest_api_obj = TimersManagerModuleRestApi(self,
- server_manager)
+ if self._idle_manager:
+ self._idle_manager.stop()
def start_timer(self, project_name, asset_name, task_name, hierarchy):
"""
@@ -105,18 +191,46 @@ class TimersManager(
}
self.timer_started(None, data)
+ def get_task_time(self, project_name, asset_name, task_name):
+ times = {}
+ for module_id, connector in self._connectors_by_module_id.items():
+ if hasattr(connector, "get_task_time"):
+ module = self._modules_by_id[module_id]
+ times[module.name] = connector.get_task_time(
+ project_name, asset_name, task_name
+ )
+ return times
+
def timer_started(self, source_id, data):
- for module in self.modules:
- if module.id != source_id:
- module.start_timer(data)
+ for module_id, connector in self._connectors_by_module_id.items():
+ if module_id == source_id:
+ continue
+
+ try:
+ connector.start_timer(data)
+ except Exception:
+ self.log.info(
+ "Failed to start timer on connector {}".format(
+ str(connector)
+ )
+ )
self.last_task = data
self.is_running = True
def timer_stopped(self, source_id):
- for module in self.modules:
- if module.id != source_id:
- module.stop_timer()
+ for module_id, connector in self._connectors_by_module_id.items():
+ if module_id == source_id:
+ continue
+
+ try:
+ connector.stop_timer()
+ except Exception:
+ self.log.info(
+ "Failed to stop timer on connector {}".format(
+ str(connector)
+ )
+ )
def restart_timers(self):
if self.last_task is not None:
@@ -126,84 +240,60 @@ class TimersManager(
if self.is_running is False:
return
- self.widget_user_idle.bool_not_stopped = False
- self.widget_user_idle.refresh_context()
+ if self._widget_user_idle is not None:
+ self._widget_user_idle.set_timer_stopped()
self.is_running = False
- for module in self.modules:
- module.stop_timer()
+ self.timer_stopped(None)
def connect_with_modules(self, enabled_modules):
for module in enabled_modules:
- if not isinstance(module, ITimersManager):
+ connector = getattr(module, "timers_manager_connector", None)
+ if connector is None:
continue
- module.timer_manager_module = self
- self.modules.append(module)
- def callbacks_by_idle_time(self):
- """Implementation of IIdleManager interface."""
- # Time when message is shown
- if not self.auto_stop:
- return {}
+ missing_methods = set()
+ for method_name in self._required_methods:
+ if not hasattr(connector, method_name):
+ missing_methods.add(method_name)
- callbacks = collections.defaultdict(list)
- callbacks[self.time_show_message].append(lambda: self.time_callback(0))
+ if missing_methods:
+ joined = ", ".join(
+ ['"{}"'.format(name for name in missing_methods)]
+ )
+ self.log.info((
+ "Module \"{}\" has missing required methods {}."
+ ).format(module.name, joined))
+ continue
- # Times when idle is between show widget and stop timers
- show_to_stop_range = range(
- self.time_show_message - 1, self.time_stop_timer
- )
- for num in show_to_stop_range:
- callbacks[num].append(lambda: self.time_callback(1))
+ self._connectors_by_module_id[module.id] = connector
+ self._modules_by_id[module.id] = module
- # Times when widget is already shown and user restart idle
- shown_and_moved_range = range(
- self.time_stop_timer - self.time_show_message
- )
- for num in shown_and_moved_range:
- callbacks[num].append(lambda: self.time_callback(1))
-
- # Time when timers are stopped
- callbacks[self.time_stop_timer].append(lambda: self.time_callback(2))
-
- return callbacks
-
- def time_callback(self, int_def):
- if not self.signal_handler:
- return
-
- if int_def == 0:
- self.signal_handler.signal_show_message.emit()
- elif int_def == 1:
- self.signal_handler.signal_change_label.emit()
- elif int_def == 2:
- self.signal_handler.signal_stop_timers.emit()
-
- def change_label(self):
- if self.is_running is False:
- return
-
- if (
- not self.idle_manager
- or self.widget_user_idle.bool_is_showed is False
- ):
- return
-
- if self.idle_manager.idle_time > self.time_show_message:
- value = self.time_stop_timer - self.idle_manager.idle_time
- else:
- value = 1 + (
- self.time_stop_timer -
- self.time_show_message -
- self.idle_manager.idle_time
- )
- self.widget_user_idle.change_count_widget(value)
+ # Optional method
+ if hasattr(connector, "register_timers_manager"):
+ try:
+ connector.register_timers_manager(self)
+ except Exception:
+ self.log.info((
+ "Failed to register timers manager"
+ " for connector of module \"{}\"."
+ ).format(module.name))
def show_message(self):
if self.is_running is False:
return
- if self.widget_user_idle.bool_is_showed is False:
- self.widget_user_idle.show()
+ if not self._widget_user_idle.is_showed():
+ self._widget_user_idle.reset_countdown()
+ self._widget_user_idle.show()
+
+ # Webserver module implementation
+ def webserver_initialization(self, server_manager):
+ """Add routes for timers to be able start/stop with rest api."""
+ if self.tray_initialized:
+ from .rest_api import TimersManagerModuleRestApi
+ self.rest_api_obj = TimersManagerModuleRestApi(
+ self, server_manager
+ )
def change_timer_from_host(self, project_name, asset_name, task_name):
"""Prepared method for calling change timers on REST api"""
diff --git a/openpype/modules/default_modules/timers_manager/widget_user_idle.py b/openpype/modules/default_modules/timers_manager/widget_user_idle.py
index 25b4e56650..1ecea74440 100644
--- a/openpype/modules/default_modules/timers_manager/widget_user_idle.py
+++ b/openpype/modules/default_modules/timers_manager/widget_user_idle.py
@@ -3,168 +3,193 @@ from openpype import resources, style
class WidgetUserIdle(QtWidgets.QWidget):
-
SIZE_W = 300
SIZE_H = 160
def __init__(self, module):
-
super(WidgetUserIdle, self).__init__()
- self.bool_is_showed = False
- self.bool_not_stopped = True
+ self.setWindowTitle("OpenPype - Stop timers")
- self.module = module
-
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
+
self.setWindowFlags(
QtCore.Qt.WindowCloseButtonHint
| QtCore.Qt.WindowMinimizeButtonHint
)
- self._translate = QtCore.QCoreApplication.translate
+ self._is_showed = False
+ self._timer_stopped = False
+ self._countdown = 0
+ self._countdown_start = 0
- self.font = QtGui.QFont()
- self.font.setFamily("DejaVu Sans Condensed")
- self.font.setPointSize(9)
- self.font.setBold(True)
- self.font.setWeight(50)
- self.font.setKerning(True)
+ self.module = module
+
+ msg_info = "You didn't work for a long time."
+ msg_question = "Would you like to stop Timers?"
+ msg_stopped = (
+ "Your Timers were stopped. Do you want to start them again?"
+ )
+
+ lbl_info = QtWidgets.QLabel(msg_info, self)
+ lbl_info.setTextFormat(QtCore.Qt.RichText)
+ lbl_info.setWordWrap(True)
+
+ lbl_question = QtWidgets.QLabel(msg_question, self)
+ lbl_question.setTextFormat(QtCore.Qt.RichText)
+ lbl_question.setWordWrap(True)
+
+ lbl_stopped = QtWidgets.QLabel(msg_stopped, self)
+ lbl_stopped.setTextFormat(QtCore.Qt.RichText)
+ lbl_stopped.setWordWrap(True)
+
+ lbl_rest_time = QtWidgets.QLabel(self)
+ lbl_rest_time.setTextFormat(QtCore.Qt.RichText)
+ lbl_rest_time.setWordWrap(True)
+ lbl_rest_time.setAlignment(QtCore.Qt.AlignCenter)
+
+ form = QtWidgets.QFormLayout()
+ form.setContentsMargins(10, 15, 10, 5)
+
+ form.addRow(lbl_info)
+ form.addRow(lbl_question)
+ form.addRow(lbl_stopped)
+ form.addRow(lbl_rest_time)
+
+ btn_stop = QtWidgets.QPushButton("Stop timer", self)
+ btn_stop.setToolTip("Stop's All timers")
+
+ btn_continue = QtWidgets.QPushButton("Continue", self)
+ btn_continue.setToolTip("Timer won't stop")
+
+ btn_close = QtWidgets.QPushButton("Close", self)
+ btn_close.setToolTip("Close window")
+
+ btn_restart = QtWidgets.QPushButton("Start timers", self)
+ btn_restart.setToolTip("Timer will be started again")
+
+ group_layout = QtWidgets.QHBoxLayout()
+ group_layout.addStretch(1)
+ group_layout.addWidget(btn_continue)
+ group_layout.addWidget(btn_stop)
+ group_layout.addWidget(btn_restart)
+ group_layout.addWidget(btn_close)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addLayout(form)
+ layout.addLayout(group_layout)
+
+ count_timer = QtCore.QTimer()
+ count_timer.setInterval(1000)
+
+ btn_stop.clicked.connect(self._on_stop_clicked)
+ btn_continue.clicked.connect(self._on_continue_clicked)
+ btn_close.clicked.connect(self._close_widget)
+ btn_restart.clicked.connect(self._on_restart_clicked)
+ count_timer.timeout.connect(self._on_count_timeout)
+
+ self.lbl_info = lbl_info
+ self.lbl_question = lbl_question
+ self.lbl_stopped = lbl_stopped
+ self.lbl_rest_time = lbl_rest_time
+
+ self.btn_stop = btn_stop
+ self.btn_continue = btn_continue
+ self.btn_close = btn_close
+ self.btn_restart = btn_restart
+
+ self._count_timer = count_timer
self.resize(self.SIZE_W, self.SIZE_H)
self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
self.setMaximumSize(QtCore.QSize(self.SIZE_W+100, self.SIZE_H+100))
self.setStyleSheet(style.load_stylesheet())
- self.setLayout(self._main())
- self.refresh_context()
- self.setWindowTitle('Pype - Stop timers')
+ def set_countdown_start(self, countdown):
+ self._countdown_start = countdown
+ if not self.is_showed():
+ self.reset_countdown()
- def _main(self):
- self.main = QtWidgets.QVBoxLayout()
- self.main.setObjectName('main')
+ def reset_countdown(self):
+ self._countdown = self._countdown_start
+ self._update_countdown_label()
- self.form = QtWidgets.QFormLayout()
- self.form.setContentsMargins(10, 15, 10, 5)
- self.form.setObjectName('form')
+ def is_showed(self):
+ return self._is_showed
- msg_info = 'You didn\'t work for a long time.'
- msg_question = 'Would you like to stop Timers?'
- msg_stopped = (
- 'Your Timers were stopped. Do you want to start them again?'
- )
+ def set_timer_stopped(self):
+ self._timer_stopped = True
+ self._refresh_context()
- self.lbl_info = QtWidgets.QLabel(msg_info)
- self.lbl_info.setFont(self.font)
- self.lbl_info.setTextFormat(QtCore.Qt.RichText)
- self.lbl_info.setObjectName("lbl_info")
- self.lbl_info.setWordWrap(True)
+ def _update_countdown_label(self):
+ self.lbl_rest_time.setText(str(self._countdown))
- self.lbl_question = QtWidgets.QLabel(msg_question)
- self.lbl_question.setFont(self.font)
- self.lbl_question.setTextFormat(QtCore.Qt.RichText)
- self.lbl_question.setObjectName("lbl_question")
- self.lbl_question.setWordWrap(True)
+ def _on_count_timeout(self):
+ if self._timer_stopped or not self._is_showed:
+ self._count_timer.stop()
+ return
- self.lbl_stopped = QtWidgets.QLabel(msg_stopped)
- self.lbl_stopped.setFont(self.font)
- self.lbl_stopped.setTextFormat(QtCore.Qt.RichText)
- self.lbl_stopped.setObjectName("lbl_stopped")
- self.lbl_stopped.setWordWrap(True)
-
- self.lbl_rest_time = QtWidgets.QLabel("")
- self.lbl_rest_time.setFont(self.font)
- self.lbl_rest_time.setTextFormat(QtCore.Qt.RichText)
- self.lbl_rest_time.setObjectName("lbl_rest_time")
- self.lbl_rest_time.setWordWrap(True)
- self.lbl_rest_time.setAlignment(QtCore.Qt.AlignCenter)
-
- self.form.addRow(self.lbl_info)
- self.form.addRow(self.lbl_question)
- self.form.addRow(self.lbl_stopped)
- self.form.addRow(self.lbl_rest_time)
-
- self.group_btn = QtWidgets.QHBoxLayout()
- self.group_btn.addStretch(1)
- self.group_btn.setObjectName("group_btn")
-
- self.btn_stop = QtWidgets.QPushButton("Stop timer")
- self.btn_stop.setToolTip('Stop\'s All timers')
- self.btn_stop.clicked.connect(self.stop_timer)
-
- self.btn_continue = QtWidgets.QPushButton("Continue")
- self.btn_continue.setToolTip('Timer won\'t stop')
- self.btn_continue.clicked.connect(self.continue_timer)
-
- self.btn_close = QtWidgets.QPushButton("Close")
- self.btn_close.setToolTip('Close window')
- self.btn_close.clicked.connect(self.close_widget)
-
- self.btn_restart = QtWidgets.QPushButton("Start timers")
- self.btn_restart.setToolTip('Timer will be started again')
- self.btn_restart.clicked.connect(self.restart_timer)
-
- self.group_btn.addWidget(self.btn_continue)
- self.group_btn.addWidget(self.btn_stop)
- self.group_btn.addWidget(self.btn_restart)
- self.group_btn.addWidget(self.btn_close)
-
- self.main.addLayout(self.form)
- self.main.addLayout(self.group_btn)
-
- return self.main
-
- def refresh_context(self):
- self.lbl_question.setVisible(self.bool_not_stopped)
- self.lbl_rest_time.setVisible(self.bool_not_stopped)
- self.lbl_stopped.setVisible(not self.bool_not_stopped)
-
- self.btn_continue.setVisible(self.bool_not_stopped)
- self.btn_stop.setVisible(self.bool_not_stopped)
- self.btn_restart.setVisible(not self.bool_not_stopped)
- self.btn_close.setVisible(not self.bool_not_stopped)
-
- def change_count_widget(self, time):
- str_time = str(time)
- self.lbl_rest_time.setText(str_time)
-
- def stop_timer(self):
- self.module.stop_timers()
- self.close_widget()
-
- def restart_timer(self):
- self.module.restart_timers()
- self.close_widget()
-
- def continue_timer(self):
- self.close_widget()
-
- def closeEvent(self, event):
- event.ignore()
- if self.bool_not_stopped is True:
- self.continue_timer()
+ if self._countdown <= 0:
+ self._stop_timers()
+ self.set_timer_stopped()
else:
- self.close_widget()
+ self._countdown -= 1
+ self._update_countdown_label()
- def close_widget(self):
- self.bool_is_showed = False
- self.bool_not_stopped = True
- self.refresh_context()
+ def _refresh_context(self):
+ self.lbl_question.setVisible(not self._timer_stopped)
+ self.lbl_rest_time.setVisible(not self._timer_stopped)
+ self.lbl_stopped.setVisible(self._timer_stopped)
+
+ self.btn_continue.setVisible(not self._timer_stopped)
+ self.btn_stop.setVisible(not self._timer_stopped)
+ self.btn_restart.setVisible(self._timer_stopped)
+ self.btn_close.setVisible(self._timer_stopped)
+
+ def _stop_timers(self):
+ self.module.stop_timers()
+
+ def _on_stop_clicked(self):
+ self._stop_timers()
+ self._close_widget()
+
+ def _on_restart_clicked(self):
+ self.module.restart_timers()
+ self._close_widget()
+
+ def _on_continue_clicked(self):
+ self._close_widget()
+
+ def _close_widget(self):
+ self._is_showed = False
+ self._timer_stopped = False
+ self._refresh_context()
self.hide()
def showEvent(self, event):
- self.bool_is_showed = True
+ if not self._is_showed:
+ self._is_showed = True
+ self._refresh_context()
+
+ if not self._count_timer.isActive():
+ self._count_timer.start()
+ super(WidgetUserIdle, self).showEvent(event)
+
+ def closeEvent(self, event):
+ event.ignore()
+ if self._timer_stopped:
+ self._close_widget()
+ else:
+ self._on_continue_clicked()
class SignalHandler(QtCore.QObject):
signal_show_message = QtCore.Signal()
- signal_change_label = QtCore.Signal()
signal_stop_timers = QtCore.Signal()
def __init__(self, module):
super(SignalHandler, self).__init__()
self.module = module
self.signal_show_message.connect(module.show_message)
- self.signal_change_label.connect(module.change_label)
self.signal_stop_timers.connect(module.stop_timers)
diff --git a/openpype/modules/default_modules/webserver/interfaces.py b/openpype/modules/default_modules/webserver/interfaces.py
deleted file mode 100644
index 779361a9ec..0000000000
--- a/openpype/modules/default_modules/webserver/interfaces.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from abc import abstractmethod
-from openpype.modules import OpenPypeInterface
-
-
-class IWebServerRoutes(OpenPypeInterface):
- """Other modules interface to register their routes."""
- @abstractmethod
- def webserver_initialization(self, server_manager):
- pass
diff --git a/openpype/modules/default_modules/webserver/webserver_module.py b/openpype/modules/default_modules/webserver/webserver_module.py
index 5bfb2d6390..686bd27bfd 100644
--- a/openpype/modules/default_modules/webserver/webserver_module.py
+++ b/openpype/modules/default_modules/webserver/webserver_module.py
@@ -1,12 +1,31 @@
+"""WebServerModule spawns aiohttp server in asyncio loop.
+
+The main usage of the module is in the OpenPype tray, where it makes sense
+to give other modules the ability to add their own routes. A module which
+wants to use that option must implement the method `webserver_initialization`,
+which receives a `WebServerManager` object where routes or paths with
+handlers can be added.
+
+WebServerManager is by default created only in tray.
+
+It is possible to create a server manager without using the module logic at
+all with `create_new_server_manager`. That can be handy for standalone
+scripts with a predefined host and port and separated routes and logic.
+
+Running multiple servers in one process is not recommended and probably won't
+work as expected, because of a few limitations of the asyncio module.
+
+When the module's `create_server_manager` is called, the environment variable
+"OPENPYPE_WEBSERVER_URL" is also set. It should lead to the root access point
+of the server.
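+
+A minimal sketch of how a module can register its routes (the class, route
+and handler below are illustrative assumptions only)::
+
+    class MyModule(OpenPypeModule, ITrayService):
+        def webserver_initialization(self, server_manager):
+            if self.tray_initialized:
+                server_manager.add_route(
+                    "GET", "/my_module/ping", self._ping_handler
+                )
+
+        async def _ping_handler(self, request):
+            from aiohttp.web_response import Response
+            return Response(text="pong", status=200)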
+"""
+
import os
import socket
from openpype import resources
from openpype.modules import OpenPypeModule
-from openpype_interfaces import (
- ITrayService,
- IWebServerRoutes
-)
+from openpype_interfaces import ITrayService
class WebServerModule(OpenPypeModule, ITrayService):
@@ -28,8 +47,15 @@ class WebServerModule(OpenPypeModule, ITrayService):
return
for module in enabled_modules:
- if isinstance(module, IWebServerRoutes):
+ if not hasattr(module, "webserver_initialization"):
+ continue
+
+ try:
module.webserver_initialization(self.server_manager)
+ except Exception:
+ self.log.warning((
+ "Failed to connect module \"{}\" to webserver."
+ ).format(module.name))
def tray_init(self):
self.create_server_manager()
diff --git a/openpype/modules/example_addons/example_addon/__init__.py b/openpype/modules/example_addons/example_addon/__init__.py
new file mode 100644
index 0000000000..721d924436
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/__init__.py
@@ -0,0 +1,15 @@
+""" Addon class definition and Settings definition must be imported here.
+
+If the addon class or settings definition is not imported here, it won't
+be found by OpenPype discovery.
+"""
+
+from .addon import (
+ AddonSettingsDef,
+ ExampleAddon
+)
+
+__all__ = (
+ "AddonSettingsDef",
+ "ExampleAddon"
+)
diff --git a/openpype/modules/example_addons/example_addon/addon.py b/openpype/modules/example_addons/example_addon/addon.py
new file mode 100644
index 0000000000..5573e33cc1
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/addon.py
@@ -0,0 +1,132 @@
+"""Addon definition is located here.
+
+Python packages that may not be available should not be imported in the
+global space here until they are required or used.
+- Qt related imports
+- imports of Python 3 packages
+    - we still support Python 2 hosts where the addon definition should
+      be available
+"""
+
+import os
+
+from openpype.modules import (
+ JsonFilesSettingsDef,
+ OpenPypeAddOn
+)
+# Import interface defined by this addon to be able to find other addons
+# using it
+from openpype_interfaces import (
+ IExampleInterface,
+ IPluginPaths,
+ ITrayAction
+)
+
+
+# Settings definition of this addon using `JsonFilesSettingsDef`
+# - JsonFilesSettingsDef is prepared settings definition using json files
+# to define settings and store default values
+class AddonSettingsDef(JsonFilesSettingsDef):
+ # This will add prefixes to every schema and template from `schemas`
+ # subfolder.
+ # - it is not required to fill the prefix but it is highly
+ # recommended as schemas and templates may have name clashes across
+ # multiple addons
+ # - it is also recommended that prefix has addon name in it
+ schema_prefix = "example_addon"
+
+ def get_settings_root_path(self):
+ """Implemented abstract class of JsonFilesSettingsDef.
+
+ Return directory path where json files defying addon settings are
+ located.
+ """
+ return os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ "settings"
+ )
+
+
+class ExampleAddon(OpenPypeAddOn, IPluginPaths, ITrayAction):
+ """This Addon has defined it's settings and interface.
+
+    This example has system settings with an 'enabled' option, and uses a
+    few other interfaces:
+ - `IPluginPaths` to define custom plugin paths
+ - `ITrayAction` to be shown in tray tool
+ """
+ label = "Example Addon"
+ name = "example_addon"
+
+ def initialize(self, settings):
+ """Initialization of addon."""
+ module_settings = settings[self.name]
+ # Enabled by settings
+ self.enabled = module_settings.get("enabled", False)
+
+ # Prepare variables that can be used or set afterwards
+ self._connected_modules = None
+ # UI which must not be created at this time
+ self._dialog = None
+
+ def tray_init(self):
+ """Implementation of abstract method for `ITrayAction`.
+
+        We're definitely in the tray tool so we can pre-create the dialog.
+ """
+
+ self._create_dialog()
+
+ def connect_with_modules(self, enabled_modules):
+ """Method where you should find connected modules.
+
+        It is triggered by the OpenPype modules manager at the best possible
+        time. Some addons and modules may need to connect with other modules
+        before their main logic is executed, so changing connections later
+        would require a restart of the whole process.
+ """
+ self._connected_modules = []
+ for module in enabled_modules:
+ if isinstance(module, IExampleInterface):
+ self._connected_modules.append(module)
+
+ def _create_dialog(self):
+ # Don't recreate dialog if already exists
+ if self._dialog is not None:
+ return
+
+ from .widgets import MyExampleDialog
+
+ self._dialog = MyExampleDialog()
+
+ def show_dialog(self):
+ """Show dialog with connected modules.
+
+ This can be called from anywhere but can also crash in headless mode.
+        There is no way to prevent an addon from doing invalid operations
+        if it does not handle them itself.
+ """
+ # Make sure dialog is created
+ self._create_dialog()
+ # Change value of dialog by current state
+ self._dialog.set_connected_modules(self.get_connected_modules())
+ # Show dialog
+ self._dialog.open()
+
+ def get_connected_modules(self):
+ """Custom implementation of addon."""
+ names = set()
+ if self._connected_modules is not None:
+ for module in self._connected_modules:
+ names.add(module.name)
+ return names
+
+ def on_action_trigger(self):
+ """Implementation of abstract method for `ITrayAction`."""
+ self.show_dialog()
+
+ def get_plugin_paths(self):
+ """Implementation of abstract method for `IPluginPaths`."""
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+
+ return {
+ "publish": [os.path.join(current_dir, "plugins", "publish")]
+ }
diff --git a/openpype/modules/example_addons/example_addon/interfaces.py b/openpype/modules/example_addons/example_addon/interfaces.py
new file mode 100644
index 0000000000..371536efc7
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/interfaces.py
@@ -0,0 +1,28 @@
+""" Using interfaces is one way of connecting multiple OpenPype Addons/Modules.
+
+Interfaces must be in `interfaces.py` file (or folder). Interfaces should not
+import module logic or other module in global namespace. That is because
+all of them must be imported before all OpenPype AddOns and Modules.
+
+Ideally they should just define abstract and helper methods. If interface
+require any logic or connection it should be defined in module.
+
+Keep in mind that attributes and methods will be added to other addon
+attributes and methods so they should be unique and ideally contain
+addon name in it's name.
+"""
+
+from abc import abstractmethod
+from openpype.modules import OpenPypeInterface
+
+
+class IExampleInterface(OpenPypeInterface):
+ """Example interface of addon."""
+ _example_module = None
+
+ def get_example_module(self):
+ return self._example_module
+
+ @abstractmethod
+ def example_method_of_example_interface(self):
+ pass
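
For illustration, a hypothetical second addon implementing this interface; ExampleAddon.connect_with_modules above would then collect it and list it in its dialog (a sketch, not part of this change):

from openpype.modules import OpenPypeAddOn
from openpype_interfaces import IExampleInterface


class FriendAddon(OpenPypeAddOn, IExampleInterface):
    """Hypothetical addon discovered through IExampleInterface."""
    name = "friend_addon"

    def example_method_of_example_interface(self):
        # Implementation of the abstract method required by the interface.
        return "hello from friend_addon"
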
diff --git a/openpype/modules/example_addons/example_addon/plugins/publish/example_plugin.py b/openpype/modules/example_addons/example_addon/plugins/publish/example_plugin.py
new file mode 100644
index 0000000000..695120e93b
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/plugins/publish/example_plugin.py
@@ -0,0 +1,9 @@
+import pyblish.api
+
+
+class CollectExampleAddon(pyblish.api.ContextPlugin):
+ order = pyblish.api.CollectorOrder + 0.4
+ label = "Collect Example Addon"
+
+ def process(self, context):
+ self.log.info("I'm in example addon's plugin!")
diff --git a/openpype/modules/example_addons/example_addon/settings/defaults/project_settings.json b/openpype/modules/example_addons/example_addon/settings/defaults/project_settings.json
new file mode 100644
index 0000000000..0a01fa8977
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/defaults/project_settings.json
@@ -0,0 +1,15 @@
+{
+ "project_settings/example_addon": {
+ "number": 0,
+ "color_1": [
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ "color_2": [
+ 0.0,
+ 0.0,
+ 0.0
+ ]
+ }
+}
\ No newline at end of file
diff --git a/openpype/modules/example_addons/example_addon/settings/defaults/system_settings.json b/openpype/modules/example_addons/example_addon/settings/defaults/system_settings.json
new file mode 100644
index 0000000000..1e77356373
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/defaults/system_settings.json
@@ -0,0 +1,5 @@
+{
+ "modules/example_addon": {
+ "enabled": true
+ }
+}
\ No newline at end of file
diff --git a/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/project_dynamic_schemas.json b/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/project_dynamic_schemas.json
new file mode 100644
index 0000000000..1f3da7b37f
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/project_dynamic_schemas.json
@@ -0,0 +1,6 @@
+{
+ "project_settings/global": {
+ "type": "schema",
+ "name": "example_addon/main"
+ }
+}
diff --git a/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/system_dynamic_schemas.json b/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/system_dynamic_schemas.json
new file mode 100644
index 0000000000..6faa48ba74
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/dynamic_schemas/system_dynamic_schemas.json
@@ -0,0 +1,6 @@
+{
+ "system_settings/modules": {
+ "type": "schema",
+ "name": "example_addon/main"
+ }
+}
diff --git a/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/main.json b/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/main.json
new file mode 100644
index 0000000000..ba692d860e
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/main.json
@@ -0,0 +1,30 @@
+{
+ "type": "dict",
+ "key": "example_addon",
+ "label": "Example addon",
+ "collapsible": true,
+ "children": [
+ {
+ "type": "number",
+ "key": "number",
+ "label": "This is your lucky number:",
+ "minimum": 7,
+ "maximum": 7,
+ "decimals": 0
+ },
+ {
+ "type": "template",
+ "name": "example_addon/the_template",
+ "template_data": [
+ {
+ "name": "color_1",
+ "label": "Color 1"
+ },
+ {
+ "name": "color_2",
+ "label": "Color 2"
+ }
+ ]
+ }
+ ]
+}
diff --git a/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/the_template.json b/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/the_template.json
new file mode 100644
index 0000000000..af8fd9dae4
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/schemas/project_schemas/the_template.json
@@ -0,0 +1,30 @@
+[
+ {
+ "type": "list-strict",
+ "key": "{name}",
+ "label": "{label}",
+ "object_types": [
+ {
+ "label": "Red",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "decimal": 3
+ },
+ {
+ "label": "Green",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "decimal": 3
+ },
+ {
+ "label": "Blue",
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "decimal": 3
+ }
+ ]
+ }
+]
diff --git a/openpype/modules/example_addons/example_addon/settings/schemas/system_schemas/main.json b/openpype/modules/example_addons/example_addon/settings/schemas/system_schemas/main.json
new file mode 100644
index 0000000000..0fb0a7c1be
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/settings/schemas/system_schemas/main.json
@@ -0,0 +1,14 @@
+{
+ "type": "dict",
+ "key": "example_addon",
+ "label": "Example addon",
+ "collapsible": true,
+ "checkbox_key": "enabled",
+ "children": [
+ {
+ "type": "boolean",
+ "key": "enabled",
+ "label": "Enabled"
+ }
+ ]
+}
diff --git a/openpype/modules/example_addons/example_addon/widgets.py b/openpype/modules/example_addons/example_addon/widgets.py
new file mode 100644
index 0000000000..0acf238409
--- /dev/null
+++ b/openpype/modules/example_addons/example_addon/widgets.py
@@ -0,0 +1,39 @@
+from Qt import QtWidgets
+
+from openpype.style import load_stylesheet
+
+
+class MyExampleDialog(QtWidgets.QDialog):
+ def __init__(self, parent=None):
+ super(MyExampleDialog, self).__init__(parent)
+
+ self.setWindowTitle("Connected modules")
+
+ label_widget = QtWidgets.QLabel(self)
+
+ ok_btn = QtWidgets.QPushButton("OK", self)
+ btns_layout = QtWidgets.QHBoxLayout()
+ btns_layout.addStretch(1)
+ btns_layout.addWidget(ok_btn)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(label_widget)
+ layout.addLayout(btns_layout)
+
+ ok_btn.clicked.connect(self._on_ok_clicked)
+
+ self._label_widget = label_widget
+
+ self.setStyleSheet(load_stylesheet())
+
+ def _on_ok_clicked(self):
+ self.done(1)
+
+ def set_connected_modules(self, connected_modules):
+ if connected_modules:
+ message = "\n".join(connected_modules)
+ else:
+ message = (
+ "Other enabled modules/addons are not using my interface."
+ )
+ self._label_widget.setText(message)
diff --git a/openpype/modules/example_addons/tiny_addon.py b/openpype/modules/example_addons/tiny_addon.py
new file mode 100644
index 0000000000..62962954f5
--- /dev/null
+++ b/openpype/modules/example_addons/tiny_addon.py
@@ -0,0 +1,9 @@
+from openpype.modules import OpenPypeAddOn
+
+
+class TinyAddon(OpenPypeAddOn):
+ """This is tiniest possible addon.
+
+ This addon won't do much but will exist in OpenPype modules environment.
+ """
+ name = "tiniest_addon_ever"
diff --git a/openpype/plugins/load/delivery.py b/openpype/plugins/load/delivery.py
index 3753f1bfc9..a8cb0070ee 100644
--- a/openpype/plugins/load/delivery.py
+++ b/openpype/plugins/load/delivery.py
@@ -71,7 +71,7 @@ class DeliveryOptionsDialog(QtWidgets.QDialog):
self._set_representations(contexts)
self.setWindowTitle("OpenPype - Deliver versions")
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
self.setWindowFlags(
diff --git a/openpype/plugins/load/open_djv.py b/openpype/plugins/load/open_djv.py
index 39b54364d9..5b49bb58d0 100644
--- a/openpype/plugins/load/open_djv.py
+++ b/openpype/plugins/load/open_djv.py
@@ -1,26 +1,28 @@
import os
-import subprocess
from avalon import api
+from openpype.api import ApplicationManager
def existing_djv_path():
- djv_paths = os.environ.get("DJV_PATH") or ""
- for path in djv_paths.split(os.pathsep):
- if os.path.exists(path):
- return path
- return None
+ app_manager = ApplicationManager()
+ djv_list = []
+ for app_name, app in app_manager.applications.items():
+ if 'djv' in app_name and app.find_executable():
+ djv_list.append(app_name)
+
+ return djv_list
class OpenInDJV(api.Loader):
"""Open Image Sequence with system default"""
- djv_path = existing_djv_path()
- families = ["*"] if djv_path else []
+ djv_list = existing_djv_path()
+ families = ["*"] if djv_list else []
representations = [
"cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg",
"mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut",
"1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf",
- "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img"
+ "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img", "h264",
]
label = "Open in DJV"
@@ -41,20 +43,18 @@ class OpenInDJV(api.Loader):
)
if not remainder:
- seqeunce = collections[0]
- first_image = list(seqeunce)[0]
+ sequence = collections[0]
+ first_image = list(sequence)[0]
else:
first_image = self.fname
filepath = os.path.normpath(os.path.join(directory, first_image))
self.log.info("Opening : {}".format(filepath))
- cmd = [
- # DJV path
- os.path.normpath(self.djv_path),
- # PATH TO COMPONENT
- os.path.normpath(filepath)
- ]
+ last_djv_version = sorted(self.djv_list)[-1]
- # Run DJV with these commands
- subprocess.Popen(cmd)
+ app_manager = ApplicationManager()
+ djv = app_manager.applications.get(last_djv_version)
+ djv.arguments.append(filepath)
+
+ app_manager.launch(last_djv_version)
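
The loader now resolves DJV through ApplicationManager instead of the DJV_PATH environment variable. The same lookup in isolation, using only the calls that appear above (application names are illustrative):

from openpype.api import ApplicationManager

app_manager = ApplicationManager()
djv_names = sorted(
    app_name
    for app_name, app in app_manager.applications.items()
    if "djv" in app_name and app.find_executable()
)
if djv_names:
    # Launch the highest sorted variant, as the loader does.
    app_manager.launch(djv_names[-1])
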
diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_avalon_entities.py
index 0b6423818e..a6120d42fe 100644
--- a/openpype/plugins/publish/collect_avalon_entities.py
+++ b/openpype/plugins/publish/collect_avalon_entities.py
@@ -22,6 +22,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
io.install()
project_name = api.Session["AVALON_PROJECT"]
asset_name = api.Session["AVALON_ASSET"]
+ task_name = api.Session["AVALON_TASK"]
project_entity = io.find_one({
"type": "project",
@@ -48,6 +49,12 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
data = asset_entity['data']
+ # Task type
+ asset_tasks = data.get("tasks") or {}
+ task_info = asset_tasks.get(task_name) or {}
+ task_type = task_info.get("type")
+ context.data["taskType"] = task_type
+
frame_start = data.get("frameStart")
if frame_start is None:
frame_start = 1
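
For reference, a sketch of the asset document shape the new task type lookup expects; the field names come from the code above, the values are illustrative:

asset_data = {
    "tasks": {
        "animation": {"type": "Animation"},
        "compositing": {"type": "Compositing"}
    }
}
task_info = (asset_data.get("tasks") or {}).get("animation") or {}
task_type = task_info.get("type")  # -> "Animation"
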
diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py
index 1aa10fcb9b..f7d1c6b4be 100644
--- a/openpype/plugins/publish/collect_hierarchy.py
+++ b/openpype/plugins/publish/collect_hierarchy.py
@@ -13,7 +13,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
"""
label = "Collect Hierarchy"
- order = pyblish.api.CollectorOrder - 0.57
+ order = pyblish.api.CollectorOrder - 0.47
families = ["shot"]
hosts = ["resolve", "hiero"]
diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py
index e1b8b95a46..a35ef47e79 100644
--- a/openpype/plugins/publish/collect_otio_frame_ranges.py
+++ b/openpype/plugins/publish/collect_otio_frame_ranges.py
@@ -18,7 +18,7 @@ class CollectOcioFrameRanges(pyblish.api.InstancePlugin):
Adding timeline and source ranges to instance data"""
label = "Collect OTIO Frame Ranges"
- order = pyblish.api.CollectorOrder - 0.58
+ order = pyblish.api.CollectorOrder - 0.48
families = ["shot", "clip"]
hosts = ["resolve", "hiero"]
diff --git a/openpype/plugins/publish/collect_otio_review.py b/openpype/plugins/publish/collect_otio_review.py
index e78ccc032c..10ceafdcca 100644
--- a/openpype/plugins/publish/collect_otio_review.py
+++ b/openpype/plugins/publish/collect_otio_review.py
@@ -20,7 +20,7 @@ class CollectOcioReview(pyblish.api.InstancePlugin):
"""Get matching otio track from defined review layer"""
label = "Collect OTIO Review"
- order = pyblish.api.CollectorOrder - 0.57
+ order = pyblish.api.CollectorOrder - 0.47
families = ["clip"]
hosts = ["resolve", "hiero"]
diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py
index 010430a303..dd670ff850 100644
--- a/openpype/plugins/publish/collect_otio_subset_resources.py
+++ b/openpype/plugins/publish/collect_otio_subset_resources.py
@@ -18,7 +18,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
"""Get Resources for a subset version"""
label = "Collect OTIO Subset Resources"
- order = pyblish.api.CollectorOrder - 0.57
+ order = pyblish.api.CollectorOrder - 0.47
families = ["clip"]
hosts = ["resolve", "hiero"]
diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py
index 98b59332da..c21f09ab8d 100644
--- a/openpype/plugins/publish/collect_resources_path.py
+++ b/openpype/plugins/publish/collect_resources_path.py
@@ -26,6 +26,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
"animation",
"model",
"mayaAscii",
+ "mayaScene",
"setdress",
"layout",
"ass",
diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py
index 625125321c..207e696fb1 100644
--- a/openpype/plugins/publish/extract_burnin.py
+++ b/openpype/plugins/publish/extract_burnin.py
@@ -1,6 +1,5 @@
import os
import re
-import subprocess
import json
import copy
import tempfile
@@ -158,6 +157,11 @@ class ExtractBurnin(openpype.api.Extractor):
filled_anatomy = anatomy.format_all(burnin_data)
burnin_data["anatomy"] = filled_anatomy.get_solved()
+        # Add custom burnin data from the instance to burnin_data.
+ burnin_data["custom"] = (
+ instance.data.get("custom_burnin_data") or {}
+ )
+
# Add source camera name to burnin data
camera_name = repre.get("camera_name")
if camera_name:
@@ -226,7 +230,8 @@ class ExtractBurnin(openpype.api.Extractor):
"options": copy.deepcopy(burnin_options),
"values": burnin_values,
"full_input_path": temp_data["full_input_paths"][0],
- "first_frame": temp_data["first_frame"]
+ "first_frame": temp_data["first_frame"],
+ "ffmpeg_cmd": new_repre.get("ffmpeg_cmd", "")
}
self.log.debug(
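
A hypothetical upstream collector feeding the new "custom" burnin key; only the 'custom_burnin_data' key name is taken from this diff, everything else is illustrative:

import pyblish.api


class CollectCustomBurninData(pyblish.api.InstancePlugin):
    """Hypothetical collector for the new 'custom' burnin key."""
    order = pyblish.api.CollectorOrder + 0.49
    label = "Collect Custom Burnin Data"

    def process(self, instance):
        # ExtractBurnin copies this dict into burnin_data["custom"].
        instance.data["custom_burnin_data"] = {
            "client": "ACME",
            "note": "wip"
        }
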
diff --git a/openpype/plugins/publish/extract_jpeg_exr.py b/openpype/plugins/publish/extract_jpeg_exr.py
index ae691285b5..3c08c1862d 100644
--- a/openpype/plugins/publish/extract_jpeg_exr.py
+++ b/openpype/plugins/publish/extract_jpeg_exr.py
@@ -1,10 +1,16 @@
import os
import pyblish.api
-import openpype.api
-import openpype.lib
-from openpype.lib import should_decompress, \
- get_decompress_dir, decompress
+from openpype.lib import (
+ get_ffmpeg_tool_path,
+
+ run_subprocess,
+ path_to_subprocess_arg,
+
+ should_decompress,
+ get_decompress_dir,
+ decompress
+)
import shutil
@@ -85,17 +91,19 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
self.log.info("output {}".format(full_output_path))
- ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
+ ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
ffmpeg_args = self.ffmpeg_args or {}
jpeg_items = []
- jpeg_items.append("\"{}\"".format(ffmpeg_path))
+ jpeg_items.append(path_to_subprocess_arg(ffmpeg_path))
# override file if already exists
jpeg_items.append("-y")
# use same input args like with mov
jpeg_items.extend(ffmpeg_args.get("input") or [])
# input file
- jpeg_items.append("-i \"{}\"".format(full_input_path))
+ jpeg_items.append("-i {}".format(
+ path_to_subprocess_arg(full_input_path)
+ ))
# output arguments from presets
jpeg_items.extend(ffmpeg_args.get("output") or [])
@@ -104,21 +112,22 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
jpeg_items.append("-vframes 1")
# output file
- jpeg_items.append("\"{}\"".format(full_output_path))
+ jpeg_items.append(path_to_subprocess_arg(full_output_path))
- subprocess_jpeg = " ".join(jpeg_items)
+ subprocess_command = " ".join(jpeg_items)
# run subprocess
- self.log.debug("{}".format(subprocess_jpeg))
+ self.log.debug("{}".format(subprocess_command))
try: # temporary until oiiotool is supported cross platform
- openpype.api.run_subprocess(
- subprocess_jpeg, shell=True, logger=self.log
+ run_subprocess(
+ subprocess_command, shell=True, logger=self.log
)
except RuntimeError as exp:
if "Compression" in str(exp):
self.log.debug("Unsupported compression on input files. " +
"Skipping!!!")
return
+ self.log.warning("Conversion crashed", exc_info=True)
raise
if "representations" not in instance.data:
diff --git a/openpype/plugins/publish/extract_otio_audio_tracks.py b/openpype/plugins/publish/extract_otio_audio_tracks.py
index 2dc822fb0e..be0bae5cdc 100644
--- a/openpype/plugins/publish/extract_otio_audio_tracks.py
+++ b/openpype/plugins/publish/extract_otio_audio_tracks.py
@@ -2,7 +2,8 @@ import os
import pyblish
import openpype.api
from openpype.lib import (
- get_ffmpeg_tool_path
+ get_ffmpeg_tool_path,
+ path_to_subprocess_arg
)
import tempfile
import opentimelineio as otio
@@ -56,14 +57,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
audio_inputs.insert(0, empty)
# create cmd
- cmd = '"{}"'.format(self.ffmpeg_path) + " "
+ cmd = path_to_subprocess_arg(self.ffmpeg_path) + " "
cmd += self.create_cmd(audio_inputs)
- cmd += "\"{}\"".format(audio_temp_fpath)
+ cmd += path_to_subprocess_arg(audio_temp_fpath)
# run subprocess
self.log.debug("Executing: {}".format(cmd))
openpype.api.run_subprocess(
- cmd, logger=self.log
+ cmd, shell=True, logger=self.log
)
# remove empty
@@ -99,16 +100,16 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
# temp audio file
audio_fpath = self.create_temp_file(name)
- cmd = " ".join([
- '"{}"'.format(self.ffmpeg_path),
- "-ss {}".format(start_sec),
- "-t {}".format(duration_sec),
- "-i \"{}\"".format(audio_file),
+ cmd = [
+ self.ffmpeg_path,
+ "-ss", str(start_sec),
+ "-t", str(duration_sec),
+ "-i", audio_file,
audio_fpath
- ])
+ ]
# run subprocess
- self.log.debug("Executing: {}".format(cmd))
+ self.log.debug("Executing: {}".format(" ".join(cmd)))
openpype.api.run_subprocess(
cmd, logger=self.log
)
@@ -220,17 +221,17 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
max_duration_sec = max(end_secs)
# create empty cmd
- cmd = " ".join([
- '"{}"'.format(self.ffmpeg_path),
- "-f lavfi",
- "-i anullsrc=channel_layout=stereo:sample_rate=48000",
- "-t {}".format(max_duration_sec),
- "\"{}\"".format(empty_fpath)
- ])
+ cmd = [
+ self.ffmpeg_path,
+ "-f", "lavfi",
+ "-i", "anullsrc=channel_layout=stereo:sample_rate=48000",
+ "-t", str(max_duration_sec),
+ empty_fpath
+ ]
# generate empty with ffmpeg
# run subprocess
- self.log.debug("Executing: {}".format(cmd))
+ self.log.debug("Executing: {}".format(" ".join(cmd)))
openpype.api.run_subprocess(
cmd, logger=self.log
@@ -261,10 +262,14 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
for index, input in enumerate(inputs):
input_format = input.copy()
input_format.update({"i": index})
+ input_format["mediaPath"] = path_to_subprocess_arg(
+ input_format["mediaPath"]
+ )
+
_inputs += (
"-ss {startSec} "
"-t {durationSec} "
- "-i \"{mediaPath}\" "
+ "-i {mediaPath} "
).format(**input_format)
_filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format(
diff --git a/openpype/plugins/publish/extract_otio_review.py b/openpype/plugins/publish/extract_otio_review.py
index 818903b54b..ed2ba017d5 100644
--- a/openpype/plugins/publish/extract_otio_review.py
+++ b/openpype/plugins/publish/extract_otio_review.py
@@ -312,7 +312,7 @@ class ExtractOTIOReview(openpype.api.Extractor):
out_frame_start += end_offset
# start command list
- command = ['"{}"'.format(ffmpeg_path)]
+ command = [ffmpeg_path]
if sequence:
input_dir, collection = sequence
@@ -324,8 +324,8 @@ class ExtractOTIOReview(openpype.api.Extractor):
# form command for rendering gap files
command.extend([
- "-start_number {}".format(in_frame_start),
- "-i \"{}\"".format(input_path)
+ "-start_number", str(in_frame_start),
+ "-i", input_path
])
elif video:
@@ -334,13 +334,15 @@ class ExtractOTIOReview(openpype.api.Extractor):
input_fps = otio_range.start_time.rate
frame_duration = otio_range.duration.value
sec_start = openpype.lib.frames_to_secons(frame_start, input_fps)
- sec_duration = openpype.lib.frames_to_secons(frame_duration, input_fps)
+ sec_duration = openpype.lib.frames_to_secons(
+ frame_duration, input_fps
+ )
# form command for rendering gap files
command.extend([
- "-ss {}".format(sec_start),
- "-t {}".format(sec_duration),
- "-i \"{}\"".format(video_path)
+ "-ss", str(sec_start),
+ "-t", str(sec_duration),
+ "-i", video_path
])
elif gap:
@@ -349,22 +351,24 @@ class ExtractOTIOReview(openpype.api.Extractor):
# form command for rendering gap files
command.extend([
- "-t {} -r {}".format(sec_duration, self.actual_fps),
- "-f lavfi",
- "-i color=c=black:s={}x{}".format(self.to_width,
- self.to_height),
- "-tune stillimage"
+ "-t", str(sec_duration),
+ "-r", str(self.actual_fps),
+ "-f", "lavfi",
+ "-i", "color=c=black:s={}x{}".format(
+ self.to_width, self.to_height
+ ),
+ "-tune", "stillimage"
])
# add output attributes
command.extend([
- "-start_number {}".format(out_frame_start),
- "\"{}\"".format(output_path)
+ "-start_number", str(out_frame_start),
+ output_path
])
# execute
self.log.debug("Executing: {}".format(" ".join(command)))
output = openpype.api.run_subprocess(
- " ".join(command), logger=self.log
+ command, logger=self.log
)
self.log.debug("Output: {}".format(output))
diff --git a/openpype/plugins/publish/extract_otio_trimming_video.py b/openpype/plugins/publish/extract_otio_trimming_video.py
index fdb7c4b096..3e2d39c99c 100644
--- a/openpype/plugins/publish/extract_otio_trimming_video.py
+++ b/openpype/plugins/publish/extract_otio_trimming_video.py
@@ -75,7 +75,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
output_path = self._get_ffmpeg_output(input_file_path)
# start command list
- command = ['"{}"'.format(ffmpeg_path)]
+ command = [ffmpeg_path]
video_path = input_file_path
frame_start = otio_range.start_time.value
@@ -86,17 +86,17 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
# form command for rendering gap files
command.extend([
- "-ss {}".format(sec_start),
- "-t {}".format(sec_duration),
- "-i \"{}\"".format(video_path),
- "-c copy",
+ "-ss", str(sec_start),
+ "-t", str(sec_duration),
+ "-i", video_path,
+ "-c", "copy",
output_path
])
# execute
self.log.debug("Executing: {}".format(" ".join(command)))
output = openpype.api.run_subprocess(
- " ".join(command), logger=self.log
+ command, logger=self.log
)
self.log.debug("Output: {}".format(output))
diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py
index 78cbea10be..7284483f5f 100644
--- a/openpype/plugins/publish/extract_review.py
+++ b/openpype/plugins/publish/extract_review.py
@@ -13,6 +13,9 @@ import openpype.api
from openpype.lib import (
get_ffmpeg_tool_path,
ffprobe_streams,
+
+ path_to_subprocess_arg,
+
should_decompress,
get_decompress_dir,
decompress
@@ -27,8 +30,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
otherwise the representation is ignored.
All new representations are created and encoded by ffmpeg following
- presets found in `pype-config/presets/plugins/global/
- publish.json:ExtractReview:outputs`.
+ presets found in OpenPype Settings interface at
+ `project_settings/global/publish/ExtractReview/profiles:outputs`.
"""
label = "Extract Review"
@@ -238,7 +241,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
"outputName": output_name,
"outputDef": output_def,
"frameStartFtrack": temp_data["output_frame_start"],
- "frameEndFtrack": temp_data["output_frame_end"]
+ "frameEndFtrack": temp_data["output_frame_end"],
+ "ffmpeg_cmd": subprcs_cmd
})
# Force to pop these key if are in new repre
@@ -480,7 +484,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
# Add video/image input path
ffmpeg_input_args.append(
- "-i \"{}\"".format(temp_data["full_input_path"])
+ "-i {}".format(
+ path_to_subprocess_arg(temp_data["full_input_path"])
+ )
)
# Add audio arguments if there are any. Skipped when output are images.
@@ -538,7 +544,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
# NOTE This must be latest added item to output arguments.
ffmpeg_output_args.append(
- "\"{}\"".format(temp_data["full_output_path"])
+ path_to_subprocess_arg(temp_data["full_output_path"])
)
return self.ffmpeg_full_args(
@@ -607,7 +613,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
audio_filters.append(arg)
all_args = []
- all_args.append("\"{}\"".format(self.ffmpeg_path))
+ all_args.append(path_to_subprocess_arg(self.ffmpeg_path))
all_args.extend(input_args)
if video_filters:
all_args.append("-filter:v")
@@ -854,7 +860,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
audio_in_args.append("-to {:0.10f}".format(audio_duration))
# Add audio input path
- audio_in_args.append("-i \"{}\"".format(audio["filename"]))
+ audio_in_args.append("-i {}".format(
+ path_to_subprocess_arg(audio["filename"])
+ ))
# NOTE: These were changed from input to output arguments.
# NOTE: value in "-ac" was hardcoded to 2, changed to audio inputs len.
diff --git a/openpype/plugins/publish/extract_review_slate.py b/openpype/plugins/publish/extract_review_slate.py
index 2b07d7db74..7002168cdb 100644
--- a/openpype/plugins/publish/extract_review_slate.py
+++ b/openpype/plugins/publish/extract_review_slate.py
@@ -117,11 +117,13 @@ class ExtractReviewSlate(openpype.api.Extractor):
input_args.extend(repre["_profile"].get('input', []))
else:
input_args.extend(repre["outputDef"].get('input', []))
- input_args.append("-loop 1 -i {}".format(slate_path))
+ input_args.append("-loop 1 -i {}".format(
+ openpype.lib.path_to_subprocess_arg(slate_path)
+ ))
input_args.extend([
"-r {}".format(fps),
- "-t 0.04"]
- )
+ "-t 0.04"
+ ])
if use_legacy_code:
codec_args = repre["_profile"].get('codec', [])
@@ -188,20 +190,24 @@ class ExtractReviewSlate(openpype.api.Extractor):
output_args.append("-y")
slate_v_path = slate_path.replace(".png", ext)
- output_args.append(slate_v_path)
+ output_args.append(
+ openpype.lib.path_to_subprocess_arg(slate_v_path)
+ )
_remove_at_end.append(slate_v_path)
slate_args = [
- "\"{}\"".format(ffmpeg_path),
+ openpype.lib.path_to_subprocess_arg(ffmpeg_path),
" ".join(input_args),
" ".join(output_args)
]
- slate_subprcs_cmd = " ".join(slate_args)
+ slate_subprocess_cmd = " ".join(slate_args)
# run slate generation subprocess
- self.log.debug("Slate Executing: {}".format(slate_subprcs_cmd))
+ self.log.debug(
+ "Slate Executing: {}".format(slate_subprocess_cmd)
+ )
openpype.api.run_subprocess(
- slate_subprcs_cmd, shell=True, logger=self.log
+ slate_subprocess_cmd, shell=True, logger=self.log
)
# create ffmpeg concat text file path
@@ -221,23 +227,22 @@ class ExtractReviewSlate(openpype.api.Extractor):
])
# concat slate and videos together
- conc_input_args = ["-y", "-f concat", "-safe 0"]
- conc_input_args.append("-i {}".format(conc_text_path))
-
- conc_output_args = ["-c copy"]
- conc_output_args.append(output_path)
-
concat_args = [
ffmpeg_path,
- " ".join(conc_input_args),
- " ".join(conc_output_args)
+ "-y",
+ "-f", "concat",
+ "-safe", "0",
+ "-i", conc_text_path,
+ "-c", "copy",
+ output_path
]
- concat_subprcs_cmd = " ".join(concat_args)
# ffmpeg concat subprocess
- self.log.debug("Executing concat: {}".format(concat_subprcs_cmd))
+ self.log.debug(
+ "Executing concat: {}".format(" ".join(concat_args))
+ )
openpype.api.run_subprocess(
- concat_subprcs_cmd, shell=True, logger=self.log
+ concat_args, logger=self.log
)
self.log.debug("__ repre[tags]: {}".format(repre["tags"]))
diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py
index f9e9b43f08..451ea1d80d 100644
--- a/openpype/plugins/publish/integrate_new.py
+++ b/openpype/plugins/publish/integrate_new.py
@@ -63,6 +63,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"animation",
"model",
"mayaAscii",
+ "mayaScene",
"setdress",
"layout",
"ass",
@@ -106,12 +107,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"family", "hierarchy", "task", "username"
]
default_template_name = "publish"
- template_name_profiles = None
+
+ # suffix to denote temporary files, use without '.'
+ TMP_FILE_EXT = 'tmp'
# file_url : file_size of all published and uploaded files
integrated_file_sizes = {}
- TMP_FILE_EXT = 'tmp' # suffix to denote temporary files, use without '.'
+ # Attributes set by settings
+ template_name_profiles = None
+ subset_grouping_profiles = None
def process(self, instance):
self.integrated_file_sizes = {}
@@ -165,10 +170,24 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
hierarchy = "/".join(parents)
anatomy_data["hierarchy"] = hierarchy
+        # Make sure task name in anatomy data is the same as in instance.data
task_name = instance.data.get("task")
if task_name:
anatomy_data["task"] = task_name
+ else:
+ # Just set 'task_name' variable to context task
+ task_name = anatomy_data["task"]
+ # Find task type for current task name
+ # - this should be already prepared on instance
+ asset_tasks = (
+ asset_entity.get("data", {}).get("tasks")
+ ) or {}
+ task_info = asset_tasks.get(task_name) or {}
+ task_type = task_info.get("type")
+ instance.data["task_type"] = task_type
+
+ # Fill family in anatomy data
anatomy_data["family"] = instance.data.get("family")
stagingdir = instance.data.get("stagingDir")
@@ -298,14 +317,19 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
else:
orig_transfers = list(instance.data['transfers'])
- task_name = io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)
- key_values = {"families": family,
- "tasks": task_name,
- "hosts": instance.data["anatomyData"]["app"]}
- profile = filter_profiles(self.template_name_profiles, key_values,
- logger=self.log)
+ key_values = {
+ "families": family,
+ "tasks": task_name,
+ "hosts": instance.context.data["hostName"],
+ "task_types": task_type
+ }
+ profile = filter_profiles(
+ self.template_name_profiles,
+ key_values,
+ logger=self.log
+ )
template_name = "publish"
if profile:
@@ -730,6 +754,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset = io.find_one({"_id": _id})
+        # QUESTION Why are the subset group change and family update
+        #   done in 'get_subset'?
self._set_subset_group(instance, subset["_id"])
# Update families on subset.
@@ -753,54 +779,74 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
subset_id (str): DB's subset _id
"""
- # add group if available
- integrate_new_sett = (instance.context.data["project_settings"]
- ["global"]
- ["publish"]
- ["IntegrateAssetNew"])
-
- profiles = integrate_new_sett["subset_grouping_profiles"]
-
- filtering_criteria = {
- "families": instance.data["family"],
- "hosts": instance.data["anatomyData"]["app"],
- "tasks": instance.data["anatomyData"]["task"] or
- io.Session["AVALON_TASK"]
- }
- matching_profile = filter_profiles(profiles, filtering_criteria)
-
- filled_template = None
- if matching_profile:
- template = matching_profile["template"]
- fill_pairs = (
- ("family", filtering_criteria["families"]),
- ("task", filtering_criteria["tasks"]),
- ("host", filtering_criteria["hosts"]),
- ("subset", instance.data["subset"]),
- ("renderlayer", instance.data.get("renderlayer"))
- )
- fill_pairs = prepare_template_data(fill_pairs)
-
- try:
- filled_template = \
- format_template_with_optional_keys(fill_pairs, template)
- except KeyError:
- keys = []
- if fill_pairs:
- keys = fill_pairs.keys()
-
- msg = "Subset grouping failed. " \
- "Only {} are expected in Settings".format(','.join(keys))
- self.log.warning(msg)
-
- if instance.data.get("subsetGroup") or filled_template:
- subset_group = instance.data.get('subsetGroup') or filled_template
+        # First look into instance data
+ subset_group = instance.data.get("subsetGroup")
+ if not subset_group:
+ subset_group = self._get_subset_group(instance)
+ if subset_group:
io.update_many({
'type': 'subset',
'_id': io.ObjectId(subset_id)
}, {'$set': {'data.subsetGroup': subset_group}})
+ def _get_subset_group(self, instance):
+ """Look into subset group profiles set by settings.
+
+ Attribute 'subset_grouping_profiles' is defined by OpenPype settings.
+ """
+ # Skip if 'subset_grouping_profiles' is empty
+ if not self.subset_grouping_profiles:
+ return None
+
+ # QUESTION
+ # - is there a chance that task name is not filled in anatomy
+ # data?
+ # - should we use context task in that case?
+ task_name = (
+ instance.data["anatomyData"]["task"]
+ or io.Session["AVALON_TASK"]
+ )
+ task_type = instance.data["task_type"]
+ filtering_criteria = {
+ "families": instance.data["family"],
+ "hosts": instance.context.data["hostName"],
+ "tasks": task_name,
+ "task_types": task_type
+ }
+ matching_profile = filter_profiles(
+ self.subset_grouping_profiles,
+ filtering_criteria
+ )
+        # Skip if there is no matching profile
+ if not matching_profile:
+ return None
+
+ filled_template = None
+ template = matching_profile["template"]
+ fill_pairs = (
+ ("family", filtering_criteria["families"]),
+ ("task", filtering_criteria["tasks"]),
+ ("host", filtering_criteria["hosts"]),
+ ("subset", instance.data["subset"]),
+ ("renderlayer", instance.data.get("renderlayer"))
+ )
+ fill_pairs = prepare_template_data(fill_pairs)
+
+ try:
+ filled_template = \
+ format_template_with_optional_keys(fill_pairs, template)
+ except KeyError:
+ keys = []
+ if fill_pairs:
+ keys = fill_pairs.keys()
+
+ msg = "Subset grouping failed. " \
+ "Only {} are expected in Settings".format(','.join(keys))
+ self.log.warning(msg)
+
+ return filled_template
+
def create_version(self, subset, version_number, data=None):
""" Copy given source to destination
diff --git a/openpype/plugins/publish/start_timer.py b/openpype/plugins/publish/start_timer.py
index 6312294bf1..112d92bef0 100644
--- a/openpype/plugins/publish/start_timer.py
+++ b/openpype/plugins/publish/start_timer.py
@@ -1,6 +1,5 @@
import pyblish.api
-from openpype.api import get_system_settings
from openpype.lib import change_timer_to_current_context
@@ -10,6 +9,6 @@ class StartTimer(pyblish.api.ContextPlugin):
hosts = ["*"]
def process(self, context):
- modules_settings = get_system_settings()["modules"]
+ modules_settings = context.data["system_settings"]["modules"]
if modules_settings["timers_manager"]["disregard_publishing"]:
change_timer_to_current_context()
diff --git a/openpype/plugins/publish/stop_timer.py b/openpype/plugins/publish/stop_timer.py
index 81afd16378..414e43a3c4 100644
--- a/openpype/plugins/publish/stop_timer.py
+++ b/openpype/plugins/publish/stop_timer.py
@@ -3,16 +3,14 @@ import requests
import pyblish.api
-from openpype.api import get_system_settings
-
class StopTimer(pyblish.api.ContextPlugin):
label = "Stop Timer"
- order = pyblish.api.ExtractorOrder - 0.5
+ order = pyblish.api.ExtractorOrder - 0.49
hosts = ["*"]
def process(self, context):
- modules_settings = get_system_settings()["modules"]
+ modules_settings = context.data["system_settings"]["modules"]
if modules_settings["timers_manager"]["disregard_publishing"]:
webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url)
diff --git a/openpype/plugins/publish/validate_containers.py b/openpype/plugins/publish/validate_containers.py
index 52df493451..784221c3b6 100644
--- a/openpype/plugins/publish/validate_containers.py
+++ b/openpype/plugins/publish/validate_containers.py
@@ -1,7 +1,5 @@
import pyblish.api
-
import openpype.lib
-from avalon.tools import cbsceneinventory
class ShowInventory(pyblish.api.Action):
@@ -11,7 +9,9 @@ class ShowInventory(pyblish.api.Action):
on = "failed"
def process(self, context, plugin):
- cbsceneinventory.show()
+ from avalon.tools import sceneinventory
+
+ sceneinventory.show()
class ValidateContainers(pyblish.api.ContextPlugin):
diff --git a/openpype/plugins/publish/validate_ffmpeg_installed.py b/openpype/plugins/publish/validate_ffmpeg_installed.py
deleted file mode 100644
index a5390a07b2..0000000000
--- a/openpype/plugins/publish/validate_ffmpeg_installed.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import pyblish.api
-import os
-import subprocess
-import openpype.lib
-try:
- import os.errno as errno
-except ImportError:
- import errno
-
-
-class ValidateFFmpegInstalled(pyblish.api.ContextPlugin):
- """Validate availability of ffmpeg tool in PATH"""
-
- order = pyblish.api.ValidatorOrder
- label = 'Validate ffmpeg installation'
- optional = True
-
- def is_tool(self, name):
- try:
- devnull = open(os.devnull, "w")
- subprocess.Popen(
- [name], stdout=devnull, stderr=devnull
- ).communicate()
- except OSError as e:
- if e.errno == errno.ENOENT:
- return False
- return True
-
- def process(self, context):
- ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
- self.log.info("ffmpeg path: `{}`".format(ffmpeg_path))
- if self.is_tool("{}".format(ffmpeg_path)) is False:
- self.log.error("ffmpeg not found in PATH")
- raise RuntimeError('ffmpeg not installed.')
diff --git a/openpype/plugins/publish/validate_intent.py b/openpype/plugins/publish/validate_intent.py
index 80bcb0e164..23d57bb2b7 100644
--- a/openpype/plugins/publish/validate_intent.py
+++ b/openpype/plugins/publish/validate_intent.py
@@ -1,5 +1,7 @@
-import pyblish.api
import os
+import pyblish.api
+
+from openpype.lib import filter_profiles
class ValidateIntent(pyblish.api.ContextPlugin):
@@ -12,20 +14,49 @@ class ValidateIntent(pyblish.api.ContextPlugin):
order = pyblish.api.ValidatorOrder
label = "Validate Intent"
- # TODO: this should be off by default and only activated viac config
- tasks = ["animation"]
- hosts = ["harmony"]
- if os.environ.get("AVALON_TASK") not in tasks:
- active = False
+ enabled = False
+
+ # Can be modified by settings
+ profiles = [{
+ "hosts": [],
+ "task_types": [],
+ "tasks": [],
+ "validate": False
+ }]
def process(self, context):
+        # Profiles (if any) can disable validation for matching contexts
+ validate = True
+ if self.profiles:
+ # Collect data from context
+ task_name = context.data.get("task")
+ task_type = context.data.get("taskType")
+ host_name = context.data.get("hostName")
+
+ filter_data = {
+ "hosts": host_name,
+ "task_types": task_type,
+ "tasks": task_name
+ }
+ matching_profile = filter_profiles(
+ self.profiles, filter_data, logger=self.log
+ )
+ if matching_profile:
+ validate = matching_profile["validate"]
+
+ if not validate:
+ self.log.debug((
+ "Validation of intent was skipped."
+ " Matching profile for current context disabled validation."
+ ))
+ return
+
msg = (
"Please make sure that you select the intent of this publish."
)
- intent = context.data.get("intent")
- self.log.debug(intent)
- assert intent, msg
-
+ intent = context.data.get("intent") or {}
+ self.log.debug(str(intent))
intent_value = intent.get("value")
- assert intent is not "", msg
+ if not intent_value:
+ raise AssertionError(msg)
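
A sketch of how the profile filtering above resolves for one context; the profile shape mirrors the class attribute defaults, concrete values are illustrative:

from openpype.lib import filter_profiles

profiles = [{
    "hosts": ["harmony"],
    "task_types": [],
    "tasks": ["animation"],
    "validate": True
}]
filter_data = {
    "hosts": "harmony",
    "task_types": "Animation",
    "tasks": "animation"
}
matching_profile = filter_profiles(profiles, filter_data)
validate = matching_profile["validate"] if matching_profile else True
# -> True: this context requires an intent to be selected before publishing.
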
diff --git a/openpype/plugins/publish/validate_version.py b/openpype/plugins/publish/validate_version.py
index 6701041541..927e024476 100644
--- a/openpype/plugins/publish/validate_version.py
+++ b/openpype/plugins/publish/validate_version.py
@@ -12,6 +12,9 @@ class ValidateVersion(pyblish.api.InstancePlugin):
label = "Validate Version"
hosts = ["nuke", "maya", "blender", "standalonepublisher"]
+ optional = False
+ active = True
+
def process(self, instance):
version = instance.data.get("version")
latest_version = instance.data.get("latestVersion")
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py
index c18fe36667..5288749e8b 100644
--- a/openpype/pype_commands.py
+++ b/openpype/pype_commands.py
@@ -257,3 +257,30 @@ class PypeCommands:
def validate_jsons(self):
pass
+ def run_tests(self, folder, mark, pyargs):
+ """
+ Runs tests from 'folder'
+
+ Args:
+            folder (list): relative paths to folders with tests
+ mark (str): label to run tests marked by it (slow etc)
+ pyargs (str): package path to test
+ """
+ print("run_tests")
+ import subprocess
+
+ if folder:
+ folder = " ".join(list(folder))
+ else:
+ folder = "../tests"
+
+ mark_str = pyargs_str = ''
+ if mark:
+ mark_str = "-m {}".format(mark)
+
+ if pyargs:
+ pyargs_str = "--pyargs {}".format(pyargs)
+
+ cmd = "pytest {} {} {}".format(folder, mark_str, pyargs_str)
+ print("Running {}".format(cmd))
+        # Run through the shell so the single command string works on all
+        # platforms.
+        subprocess.run(cmd, shell=True)
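
Illustrative direct call of the new helper, assuming PypeCommands can be instantiated without arguments; in practice this is expected to be wired to a CLI command outside this diff:

from openpype.pype_commands import PypeCommands

# Run tests marked as "slow" from the default ../tests folder.
PypeCommands().run_tests(folder=None, mark="slow", pyargs=None)
# -> executes roughly: pytest ../tests -m slow
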
diff --git a/openpype/resources/__init__.py b/openpype/resources/__init__.py
index ef4ed73974..c6886fea73 100644
--- a/openpype/resources/__init__.py
+++ b/openpype/resources/__init__.py
@@ -1,5 +1,5 @@
import os
-
+from openpype.lib.pype_info import is_running_staging
RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -30,22 +30,22 @@ def get_liberation_font_path(bold=False, italic=False):
return font_path
-def pype_icon_filepath(debug=None):
- if debug is None:
- debug = bool(os.getenv("OPENPYPE_DEV"))
+def get_openpype_icon_filepath(staging=None):
+ if staging is None:
+ staging = is_running_staging()
- if debug:
+ if staging:
icon_file_name = "openpype_icon_staging.png"
else:
icon_file_name = "openpype_icon.png"
return get_resource("icons", icon_file_name)
-def pype_splash_filepath(debug=None):
- if debug is None:
- debug = bool(os.getenv("OPENPYPE_DEV"))
+def get_openpype_splash_filepath(staging=None):
+ if staging is None:
+ staging = is_running_staging()
- if debug:
+ if staging:
splash_file_name = "openpype_splash_staging.png"
else:
splash_file_name = "openpype_splash.png"
diff --git a/openpype/scripts/otio_burnin.py b/openpype/scripts/otio_burnin.py
index dc8d60cb37..184d7689e3 100644
--- a/openpype/scripts/otio_burnin.py
+++ b/openpype/scripts/otio_burnin.py
@@ -69,7 +69,7 @@ def get_fps(str_value):
return str(fps)
-def _prores_codec_args(ffprobe_data):
+def _prores_codec_args(ffprobe_data, source_ffmpeg_cmd):
output = []
tags = ffprobe_data.get("tags") or {}
@@ -108,14 +108,24 @@ def _prores_codec_args(ffprobe_data):
return output
-def _h264_codec_args(ffprobe_data):
+def _h264_codec_args(ffprobe_data, source_ffmpeg_cmd):
output = []
output.extend(["-codec:v", "h264"])
- bit_rate = ffprobe_data.get("bit_rate")
- if bit_rate:
- output.extend(["-b:v", bit_rate])
+    # Use arguments from the source ffmpeg command if they are available
+ if source_ffmpeg_cmd:
+ copy_args = (
+ "-crf",
+ "-b:v", "-vb",
+ "-minrate", "-minrate:",
+ "-maxrate", "-maxrate:",
+ "-bufsize", "-bufsize:"
+ )
+ args = source_ffmpeg_cmd.split(" ")
+ for idx, arg in enumerate(args):
+ if arg in copy_args:
+ output.extend([arg, args[idx + 1]])
pix_fmt = ffprobe_data.get("pix_fmt")
if pix_fmt:
@@ -127,15 +137,15 @@ def _h264_codec_args(ffprobe_data):
return output
-def get_codec_args(ffprobe_data):
+def get_codec_args(ffprobe_data, source_ffmpeg_cmd):
codec_name = ffprobe_data.get("codec_name")
# Codec "prores"
if codec_name == "prores":
- return _prores_codec_args(ffprobe_data)
+ return _prores_codec_args(ffprobe_data, source_ffmpeg_cmd)
# Codec "h264"
if codec_name == "h264":
- return _h264_codec_args(ffprobe_data)
+ return _h264_codec_args(ffprobe_data, source_ffmpeg_cmd)
output = []
if codec_name:
@@ -469,7 +479,7 @@ def example(input_path, output_path):
def burnins_from_data(
input_path, output_path, data,
codec_data=None, options=None, burnin_values=None, overwrite=True,
- full_input_path=None, first_frame=None
+ full_input_path=None, first_frame=None, source_ffmpeg_cmd=None
):
"""This method adds burnins to video/image file based on presets setting.
@@ -647,7 +657,7 @@ def burnins_from_data(
else:
ffprobe_data = burnin._streams[0]
- ffmpeg_args.extend(get_codec_args(ffprobe_data))
+ ffmpeg_args.extend(get_codec_args(ffprobe_data, source_ffmpeg_cmd))
# Use group one (same as `-intra` argument, which is deprecated)
ffmpeg_args_str = " ".join(ffmpeg_args)
@@ -670,6 +680,7 @@ if __name__ == "__main__":
options=in_data.get("options"),
burnin_values=in_data.get("values"),
full_input_path=in_data.get("full_input_path"),
- first_frame=in_data.get("first_frame")
+ first_frame=in_data.get("first_frame"),
+ source_ffmpeg_cmd=in_data.get("ffmpeg_cmd")
)
print("* Burnin script has finished")
diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py
index 0adb5db0bd..9d7598a948 100644
--- a/openpype/settings/__init__.py
+++ b/openpype/settings/__init__.py
@@ -25,7 +25,8 @@ from .lib import (
)
from .entities import (
SystemSettings,
- ProjectSettings
+ ProjectSettings,
+ DefaultsNotDefined
)
@@ -53,5 +54,6 @@ __all__ = (
"get_local_settings",
"SystemSettings",
- "ProjectSettings"
+ "ProjectSettings",
+ "DefaultsNotDefined"
)
diff --git a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json
index 387e12bcea..983ac603f9 100644
--- a/openpype/settings/defaults/project_anatomy/attributes.json
+++ b/openpype/settings/defaults/project_anatomy/attributes.json
@@ -22,5 +22,6 @@
"aftereffects/2021",
"unreal/4-26"
],
- "tools_env": []
+ "tools_env": [],
+ "active": true
}
\ No newline at end of file
diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json
index fcebc876f5..38313a3d84 100644
--- a/openpype/settings/defaults/project_anatomy/imageio.json
+++ b/openpype/settings/defaults/project_anatomy/imageio.json
@@ -124,9 +124,47 @@
"value": "True"
}
]
+ },
+ {
+ "plugins": [
+ "CreateWriteStill"
+ ],
+ "nukeNodeClass": "Write",
+ "knobs": [
+ {
+ "name": "file_type",
+ "value": "tiff"
+ },
+ {
+ "name": "datatype",
+ "value": "16 bit"
+ },
+ {
+ "name": "compression",
+ "value": "Deflate"
+ },
+ {
+ "name": "tile_color",
+ "value": "0x23ff00ff"
+ },
+ {
+ "name": "channels",
+ "value": "rgb"
+ },
+ {
+ "name": "colorspace",
+ "value": "sRGB"
+ },
+ {
+ "name": "create_directories",
+ "value": "True"
+ }
+ ]
}
],
- "customNodes": []
+ "customNodes": [
+
+ ]
},
"regexInputs": {
"inputs": [
diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json
index 692176a585..b3ea77a584 100644
--- a/openpype/settings/defaults/project_settings/ftrack.json
+++ b/openpype/settings/defaults/project_settings/ftrack.json
@@ -209,6 +209,7 @@
"standalonepublisher"
],
"families": [],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
@@ -221,6 +222,7 @@
"matchmove",
"shot"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": false,
"advanced_filtering": []
@@ -232,6 +234,7 @@
"families": [
"plate"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": false,
"advanced_filtering": [
@@ -256,6 +259,7 @@
"rig",
"camera"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
@@ -267,6 +271,7 @@
"families": [
"renderPass"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": false,
"advanced_filtering": []
@@ -276,6 +281,7 @@
"tvpaint"
],
"families": [],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
@@ -288,6 +294,7 @@
"write",
"render"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": false,
"advanced_filtering": [
@@ -307,6 +314,7 @@
"render",
"workfile"
],
+ "task_types": [],
"tasks": [],
"add_ftrack_family": true,
"advanced_filtering": []
diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json
index a53ae14914..45c1a59d17 100644
--- a/openpype/settings/defaults/project_settings/global.json
+++ b/openpype/settings/defaults/project_settings/global.json
@@ -6,7 +6,12 @@
},
"ValidateVersion": {
"enabled": true,
- "optional": false
+ "optional": false,
+ "active": true
+ },
+ "ValidateIntent": {
+ "enabled": false,
+ "profiles": []
},
"IntegrateHeroVersion": {
"enabled": true,
@@ -19,7 +24,7 @@
"animation",
"setdress",
"layout",
- "mayaAscii"
+ "mayaScene"
]
},
"ExtractJpegEXR": {
@@ -152,6 +157,7 @@
{
"families": [],
"hosts": [],
+ "task_types": [],
"tasks": [],
"template_name": "publish"
},
@@ -162,6 +168,7 @@
"prerender"
],
"hosts": [],
+ "task_types": [],
"tasks": [],
"template_name": "render"
}
@@ -170,6 +177,7 @@
{
"families": [],
"hosts": [],
+ "task_types": [],
"tasks": [],
"template": ""
}
@@ -205,6 +213,7 @@
{
"families": [],
"hosts": [],
+ "task_types": [],
"tasks": [],
"template": "{family}{Variant}"
},
@@ -213,6 +222,7 @@
"render"
],
"hosts": [],
+ "task_types": [],
"tasks": [],
"template": "{family}{Task}{Variant}"
},
@@ -224,6 +234,7 @@
"hosts": [
"tvpaint"
],
+ "task_types": [],
"tasks": [],
"template": "{family}{Task}_{Render_layer}_{Render_pass}"
},
@@ -235,6 +246,7 @@
"hosts": [
"tvpaint"
],
+ "task_types": [],
"tasks": [],
"template": "{family}{Task}"
},
@@ -245,6 +257,7 @@
"hosts": [
"aftereffects"
],
+ "task_types": [],
"tasks": [],
"template": "render{Task}{Variant}"
}
@@ -261,6 +274,7 @@
"last_workfile_on_startup": [
{
"hosts": [],
+ "task_types": [],
"tasks": [],
"enabled": true
}
@@ -268,6 +282,7 @@
"open_workfile_tool_on_startup": [
{
"hosts": [],
+ "task_types": [],
"tasks": [],
"enabled": false
}
@@ -287,6 +302,15 @@
"textures"
]
}
+ },
+ "loader": {
+ "family_filter_profiles": [
+ {
+ "hosts": [],
+ "task_types": [],
+ "filter_families": []
+ }
+ ]
}
},
"project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets[ftrack.Library]\": {\"characters[ftrack]\": {}, \"locations[ftrack]\": {}}, \"shots[ftrack.Sequence]\": {\"scripts\": {}, \"editorial[ftrack.Folder]\": {}}}}",
diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json
index f9911897d7..c592d74350 100644
--- a/openpype/settings/defaults/project_settings/maya.json
+++ b/openpype/settings/defaults/project_settings/maya.json
@@ -156,6 +156,11 @@
"CollectMayaRender": {
"sync_workfile_version": false
},
+ "ValidateInstanceInContext": {
+ "enabled": true,
+ "optional": true,
+ "active": true
+ },
"ValidateContainers": {
"enabled": true,
"optional": true,
@@ -169,6 +174,11 @@
"enabled": false,
"attributes": {}
},
+ "ValidateLoadedPlugin": {
+ "enabled": false,
+ "whitelist_native_plugins": false,
+ "authorized_plugins": []
+ },
"ValidateRenderSettings": {
"arnold_render_attributes": [],
"vray_render_attributes": [],
@@ -479,6 +489,12 @@
255,
255
],
+ "mayaScene": [
+ 67,
+ 174,
+ 255,
+ 255
+ ],
"setdress": [
255,
250,
@@ -520,6 +536,7 @@
"workfile_build": {
"profiles": [
{
+ "task_types": [],
"tasks": [
"Lighting"
],
diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json
index 136f1d6b42..dd65df02e5 100644
--- a/openpype/settings/defaults/project_settings/nuke.json
+++ b/openpype/settings/defaults/project_settings/nuke.json
@@ -30,7 +30,18 @@
},
"publish": {
"PreCollectNukeInstances": {
- "sync_workfile_version": true
+ "sync_workfile_version_on_families": [
+ "nukenodes",
+ "camera",
+ "gizmo",
+ "source",
+ "render"
+ ]
+ },
+ "ValidateInstanceInContext": {
+ "enabled": true,
+ "optional": true,
+ "active": true
},
"ValidateContainers": {
"enabled": true,
@@ -96,6 +107,11 @@
},
"ExtractSlateFrame": {
"viewer_lut_raw": false
+ },
+ "IncrementScriptVersion": {
+ "enabled": true,
+ "optional": true,
+ "active": true
}
},
"load": {
@@ -116,7 +132,8 @@
"jpg",
"jpeg",
"png",
- "psd"
+ "psd",
+ "tiff"
],
"node_name_template": "{class_name}_{ext}"
},
@@ -163,6 +180,7 @@
"builder_on_start": false,
"profiles": [
{
+ "task_types": [],
"tasks": [],
"current_context": [
{
diff --git a/openpype/settings/defaults/project_settings/slack.json b/openpype/settings/defaults/project_settings/slack.json
index e70ef77fd2..2d10bd173d 100644
--- a/openpype/settings/defaults/project_settings/slack.json
+++ b/openpype/settings/defaults/project_settings/slack.json
@@ -7,8 +7,9 @@
"profiles": [
{
"families": [],
- "tasks": [],
"hosts": [],
+ "task_types": [],
+ "tasks": [],
"channel_messages": []
}
]
diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json
index 47f486aa98..528bf6de8e 100644
--- a/openpype/settings/defaults/project_settings/tvpaint.json
+++ b/openpype/settings/defaults/project_settings/tvpaint.json
@@ -1,4 +1,5 @@
{
+ "stop_timer_on_application_exit": false,
"publish": {
"ExtractSequence": {
"review_bg": [
diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json
index 842c294599..cfdeca4b87 100644
--- a/openpype/settings/defaults/system_settings/applications.json
+++ b/openpype/settings/defaults/system_settings/applications.json
@@ -195,7 +195,7 @@
"environment": {}
},
"__dynamic_keys_labels__": {
- "13-0": "13.0 (Testing only)",
+ "13-0": "13.0",
"12-2": "12.2",
"12-0": "12.0",
"11-3": "11.3",
@@ -331,7 +331,7 @@
"environment": {}
},
"__dynamic_keys_labels__": {
- "13-0": "13.0 (Testing only)",
+ "13-0": "13.0",
"12-2": "12.2",
"12-0": "12.0",
"11-3": "11.3",
diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json
index d03fedf3c9..f54e8b2b16 100644
--- a/openpype/settings/defaults/system_settings/general.json
+++ b/openpype/settings/defaults/system_settings/general.json
@@ -7,6 +7,11 @@
"global": []
}
},
+ "disk_mapping": {
+ "windows": [],
+ "linux": [],
+ "darwin": []
+ },
"openpype_path": {
"windows": [],
"darwin": [],
diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json
index 229b867327..beb1eb4f24 100644
--- a/openpype/settings/defaults/system_settings/modules.json
+++ b/openpype/settings/defaults/system_settings/modules.json
@@ -179,4 +179,4 @@
"slack": {
"enabled": false
}
-}
+}
\ No newline at end of file
diff --git a/openpype/settings/entities/__init__.py b/openpype/settings/entities/__init__.py
index 8c30d5044c..aae2d1fa89 100644
--- a/openpype/settings/entities/__init__.py
+++ b/openpype/settings/entities/__init__.py
@@ -105,7 +105,6 @@ from .enum_entity import (
AppsEnumEntity,
ToolsEnumEntity,
TaskTypeEnumEntity,
- ProvidersEnum,
DeadlineUrlEnumEntity,
AnatomyTemplatesEnumEntity
)
@@ -113,7 +112,10 @@ from .enum_entity import (
from .list_entity import ListEntity
from .dict_immutable_keys_entity import DictImmutableKeysEntity
from .dict_mutable_keys_entity import DictMutableKeysEntity
-from .dict_conditional import DictConditionalEntity
+from .dict_conditional import (
+ DictConditionalEntity,
+ SyncServerProviders
+)
from .anatomy_entities import AnatomyEntity
@@ -161,7 +163,6 @@ __all__ = (
"AppsEnumEntity",
"ToolsEnumEntity",
"TaskTypeEnumEntity",
- "ProvidersEnum",
"DeadlineUrlEnumEntity",
"AnatomyTemplatesEnumEntity",
@@ -172,6 +173,7 @@ __all__ = (
"DictMutableKeysEntity",
"DictConditionalEntity",
+ "SyncServerProviders",
"AnatomyEntity"
)
diff --git a/openpype/settings/entities/dict_conditional.py b/openpype/settings/entities/dict_conditional.py
index d7b416921c..6f27760570 100644
--- a/openpype/settings/entities/dict_conditional.py
+++ b/openpype/settings/entities/dict_conditional.py
@@ -724,3 +724,49 @@ class DictConditionalEntity(ItemEntity):
for children in self.children.values():
for child_entity in children:
child_entity.reset_callbacks()
+
+
+class SyncServerProviders(DictConditionalEntity):
+ schema_types = ["sync-server-providers"]
+
+ def _add_children(self):
+ self.enum_key = "provider"
+ self.enum_label = "Provider"
+
+ enum_children = self._get_enum_children()
+ if not enum_children:
+ enum_children.append({
+ "key": None,
+ "label": "< Nothing >"
+ })
+ self.enum_children = enum_children
+
+ super(SyncServerProviders, self)._add_children()
+
+ def _get_enum_children(self):
+ from openpype_modules import sync_server
+
+ from openpype_modules.sync_server.providers import lib as lib_providers
+
+ provider_code_to_label = {}
+ providers = lib_providers.factory.providers
+ for provider_code, provider_info in providers.items():
+ provider, _ = provider_info
+ provider_code_to_label[provider_code] = provider.LABEL
+
+ system_settings_schema = (
+ sync_server
+ .SyncServerModule
+ .get_system_settings_schema()
+ )
+
+ enum_children = []
+ for provider_code, configurables in system_settings_schema.items():
+ label = provider_code_to_label.get(provider_code) or provider_code
+
+ enum_children.append({
+ "key": provider_code,
+ "label": label,
+ "children": configurables
+ })
+ return enum_children
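
The new "sync-server-providers" schema type (referenced from schema_modules.json further below) builds its conditional children from the provider factory. A minimal sketch of the enum structure _get_enum_children is expected to return, assuming a single registered gdrive provider; the child fields shown are illustrative only, the real ones come from SyncServerModule.get_system_settings_schema():

    # Sketch only: shape of the data consumed by DictConditionalEntity,
    # not the real provider schema.
    enum_children = [
        {
            "key": "gdrive",          # provider code from the factory
            "label": "Google Drive",  # provider.LABEL
            "children": [             # configurables for this provider
                {
                    "type": "path",
                    "key": "credentials_url",
                    "label": "Credentials url",
                    "multiplatform": True
                }
            ]
        }
    ]
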
diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py
index cb532c5ae0..a5e734f039 100644
--- a/openpype/settings/entities/enum_entity.py
+++ b/openpype/settings/entities/enum_entity.py
@@ -376,11 +376,16 @@ class TaskTypeEnumEntity(BaseEnumEntity):
schema_types = ["task-types-enum"]
def _item_initalization(self):
- self.multiselection = True
- self.value_on_not_set = []
+ self.multiselection = self.schema_data.get("multiselection", True)
+ if self.multiselection:
+ self.valid_value_types = (list, )
+ self.value_on_not_set = []
+ else:
+ self.valid_value_types = (STRING_TYPE, )
+ self.value_on_not_set = ""
+
self.enum_items = []
self.valid_keys = set()
- self.valid_value_types = (list, )
self.placeholder = None
def _get_enum_values(self):
@@ -396,53 +401,51 @@ class TaskTypeEnumEntity(BaseEnumEntity):
return enum_items, valid_keys
+ def _convert_value_for_current_state(self, source_value):
+ if self.multiselection:
+ output = []
+ for key in source_value:
+ if key in self.valid_keys:
+ output.append(key)
+ return output
+
+ if source_value not in self.valid_keys:
+ # Take first item from enum items
+ for item in self.enum_items:
+ for key in item.keys():
+ source_value = key
+ break
+ return source_value
+
def set_override_state(self, *args, **kwargs):
super(TaskTypeEnumEntity, self).set_override_state(*args, **kwargs)
self.enum_items, self.valid_keys = self._get_enum_values()
- new_value = []
- for key in self._current_value:
- if key in self.valid_keys:
- new_value.append(key)
- self._current_value = new_value
+ if self.multiselection:
+ new_value = []
+ for key in self._current_value:
+ if key in self.valid_keys:
+ new_value.append(key)
-class ProvidersEnum(BaseEnumEntity):
- schema_types = ["providers-enum"]
+ if self._current_value != new_value:
+ self.set(new_value)
+ else:
+ if not self.enum_items:
+ self.valid_keys.add("")
+ self.enum_items.append({"": "< Empty >"})
- def _item_initalization(self):
- self.multiselection = False
- self.value_on_not_set = ""
- self.enum_items = []
- self.valid_keys = set()
- self.valid_value_types = (str, )
- self.placeholder = None
+ for item in self.enum_items:
+ for key in item.keys():
+ value_on_not_set = key
+ break
- def _get_enum_values(self):
- from openpype_modules.sync_server.providers import lib as lib_providers
-
- providers = lib_providers.factory.providers
-
- valid_keys = set()
- valid_keys.add('')
- enum_items = [{'': 'Choose Provider'}]
- for provider_code, provider_info in providers.items():
- provider, _ = provider_info
- enum_items.append({provider_code: provider.LABEL})
- valid_keys.add(provider_code)
-
- return enum_items, valid_keys
-
- def set_override_state(self, *args, **kwargs):
- super(ProvidersEnum, self).set_override_state(*args, **kwargs)
-
- self.enum_items, self.valid_keys = self._get_enum_values()
-
- value_on_not_set = list(self.valid_keys)[0]
- if self._current_value is NOT_SET:
- self._current_value = value_on_not_set
-
- self.value_on_not_set = value_on_not_set
+ self.value_on_not_set = value_on_not_set
+ if (
+ self._current_value is NOT_SET
+ or self._current_value not in self.valid_keys
+ ):
+ self.set(value_on_not_set)
class DeadlineUrlEnumEntity(BaseEnumEntity):
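
With the multiselection flag, the same "task-types-enum" type now covers both list and single-value use cases. A short sketch of the two schema variants and the value type each one stores (the task type names are placeholders, in practice they come from the project anatomy):

    # Multiselection (the default): stored value is a list of task type names.
    {"type": "task-types-enum", "key": "task_types", "label": "Task types"}
    # e.g. ["Animation", "Compositing"]

    # Single selection: stored value is one task type name as a string,
    # falling back to the first enum item (or "" when no task types exist).
    {"type": "task-types-enum", "key": "task_type", "label": "Task type",
     "multiselection": False}
    # e.g. "Animation"
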
diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py
index ebc70b840d..0ded3ab7e5 100644
--- a/openpype/settings/entities/input_entities.py
+++ b/openpype/settings/entities/input_entities.py
@@ -379,6 +379,11 @@ class NumberEntity(InputEntity):
# UI specific attributes
self.show_slider = self.schema_data.get("show_slider", False)
+ steps = self.schema_data.get("steps", None)
+ # Make sure that steps are not set to `0`
+ if steps == 0:
+ steps = None
+ self.steps = steps
def _convert_to_valid_type(self, value):
if isinstance(value, str):
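
A sketch of a number input using the new "steps" key, mirroring the AVALON_TIMEOUT change in schema_modules.json further below; "steps" only affects how far the UI spin arrows and mouse wheel move the value, and a value of 0 is treated as unset:

    {
        "type": "number",
        "key": "AVALON_TIMEOUT",
        "label": "Avalon Mongo Timeout (ms)",
        "minimum": 0,
        "steps": 100,          # arrows / wheel change the value by 100
        "show_slider": False   # optional slider, unrelated to steps
    }
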
diff --git a/openpype/settings/entities/lib.py b/openpype/settings/entities/lib.py
index f207322dee..bf3868c08d 100644
--- a/openpype/settings/entities/lib.py
+++ b/openpype/settings/entities/lib.py
@@ -168,9 +168,13 @@ class SchemasHub:
if isinstance(def_schema, dict):
def_schema = [def_schema]
+ all_def_schema = []
for item in def_schema:
- item["_dynamic_schema_id"] = def_id
- output.extend(def_schema)
+ items = self.resolve_schema_data(item)
+ for _item in items:
+ _item["_dynamic_schema_id"] = def_id
+ all_def_schema.extend(items)
+ output.extend(all_def_schema)
return output
def get_template_name(self, item_def, default=None):
diff --git a/openpype/settings/entities/schemas/README.md b/openpype/settings/entities/schemas/README.md
index 9b53e89dd7..c8432f0f2e 100644
--- a/openpype/settings/entities/schemas/README.md
+++ b/openpype/settings/entities/schemas/README.md
@@ -316,6 +316,7 @@ How output of the schema could look like on save:
- key `"decimal"` defines how many decimal places will be used, 0 is for integer input (Default: `0`)
- key `"minimum"` as minimum allowed number to enter (Default: `-99999`)
- key `"maxium"` as maximum allowed number to enter (Default: `99999`)
+- key `"steps"` changes the single step value of UI inputs (arrow buttons and mouse wheel scrolling)

- for UI it is possible to show slider to enable this option set `show_slider` to `true`
```
{
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json
index 1cc08b96f8..e50e269695 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json
@@ -650,6 +650,11 @@
"type": "list",
"object_type": "text"
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
"label": "Task names",
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json
index 170de7c8a2..9ca4e443bd 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_slack.json
@@ -52,18 +52,23 @@
"type": "list",
"object_type": "text"
},
- {
- "key": "tasks",
- "label": "Task names",
- "type": "list",
- "object_type": "text"
- },
{
"type": "hosts-enum",
"key": "hosts",
"label": "Host names",
"multiselection": true
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
+ {
+ "key": "tasks",
+ "label": "Task names",
+ "type": "list",
+ "object_type": "text"
+ },
{
"type": "separator"
},
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json b/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json
index cb2cc9c9d1..3211babd43 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json
@@ -5,64 +5,134 @@
"collapsible": true,
"checkbox_key": "enabled",
"children": [
- {
- "type": "boolean",
- "key": "enabled",
- "label": "Enabled"
- },
- {
- "type": "dict",
- "key": "config",
- "label": "Config",
- "collapsible": true,
- "children": [
- {
- "type": "text",
- "key": "retry_cnt",
- "label": "Retry Count"
- },
- {
- "type": "text",
- "key": "loop_delay",
- "label": "Loop Delay"
- },
- {
- "type": "text",
- "key": "active_site",
- "label": "Active Site"
- },
- {
- "type": "text",
- "key": "remote_site",
- "label": "Remote Site"
- }
- ]
- }, {
- "type": "dict-modifiable",
- "collapsible": true,
- "key": "sites",
- "label": "Sites",
- "collapsible_key": false,
- "object_type":
+ {
+ "type": "boolean",
+ "key": "enabled",
+ "label": "Enabled"
+ },
{
"type": "dict",
+ "key": "config",
+ "label": "Config",
+ "collapsible": true,
"children": [
- {
- "type": "path",
- "key": "credentials_url",
- "label": "Credentials url",
- "multiplatform": true
- },
- {
- "type": "dict-modifiable",
- "key": "root",
- "label": "Roots",
- "collapsable": false,
- "collapsable_key": false,
- "object_type": "text"
- }
+ {
+ "type": "text",
+ "key": "retry_cnt",
+ "label": "Retry Count"
+ },
+ {
+ "type": "text",
+ "key": "loop_delay",
+ "label": "Loop Delay"
+ },
+ {
+ "type": "text",
+ "key": "active_site",
+ "label": "Active Site"
+ },
+ {
+ "type": "text",
+ "key": "remote_site",
+ "label": "Remote Site"
+ }
]
+ },
+ {
+ "type": "dict-modifiable",
+ "collapsible": true,
+ "key": "sites",
+ "label": "Sites",
+ "collapsible_key": false,
+ "object_type": {
+ "type": "dict",
+ "children": [
+ {
+ "type": "dict",
+ "key": "gdrive",
+ "label": "Google Drive",
+ "collapsible": true,
+ "children": [
+ {
+ "type": "path",
+ "key": "credentials_url",
+ "label": "Credentials url",
+ "multiplatform": true
+ }
+ ]
+ },
+ {
+ "type": "dict",
+ "key": "dropbox",
+ "label": "Dropbox",
+ "collapsible": true,
+ "children": [
+ {
+ "type": "text",
+ "key": "token",
+ "label": "Access Token"
+ },
+ {
+ "type": "text",
+ "key": "team_folder_name",
+ "label": "Team Folder Name"
+ },
+ {
+ "type": "text",
+ "key": "acting_as_member",
+ "label": "Acting As Member"
+ }
+ ]
+ },
+ {
+ "type": "dict",
+ "key": "sftp",
+ "label": "SFTP",
+ "collapsible": true,
+ "children": [
+ {
+ "type": "text",
+ "key": "sftp_host",
+ "label": "SFTP host"
+ },
+ {
+ "type": "number",
+ "key": "sftp_port",
+ "label": "SFTP port"
+ },
+ {
+ "type": "text",
+ "key": "sftp_user",
+ "label": "SFTP user"
+ },
+ {
+ "type": "text",
+ "key": "sftp_pass",
+ "label": "SFTP pass"
+ },
+ {
+ "type": "path",
+ "key": "sftp_key",
+ "label": "SFTP user ssh key",
+ "multiplatform": true
+ },
+ {
+ "type": "text",
+ "key": "sftp_key_pass",
+ "label": "SFTP user ssh key password"
+ }
+ ]
+ },
+ {
+ "type": "dict-modifiable",
+ "key": "root",
+ "label": "Roots",
+ "collapsable": false,
+ "collapsable_key": false,
+ "object_type": "text"
+ }
+ ]
+ }
}
- }
]
}
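
After this restructure every configurable site carries a section per provider plus the shared roots. A rough sketch of one site entry as it would look in the project settings overrides (the site name and all values are placeholders):

    {
        "studio_gdrive": {
            "gdrive": {
                "credentials_url": {"windows": "", "linux": "", "darwin": ""}
            },
            "dropbox": {
                "token": "",
                "team_folder_name": "",
                "acting_as_member": ""
            },
            "sftp": {
                "sftp_host": "",
                "sftp_port": 22,
                "sftp_user": "",
                "sftp_pass": "",
                "sftp_key": {"windows": "", "linux": "", "darwin": ""},
                "sftp_key_pass": ""
            },
            "root": {"work": ""}
        }
    }
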
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json
index 368141813f..8286ed1193 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json
@@ -5,6 +5,11 @@
"label": "TVPaint",
"is_file": true,
"children": [
+ {
+ "type": "boolean",
+ "key": "stop_timer_on_application_exit",
+ "label": "Stop timer on application exit"
+ },
{
"type": "dict",
"collapsible": true,
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json
index 7391108a02..a2a566da0e 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json
@@ -69,6 +69,11 @@
"type": "tools-enum",
"key": "tools_env",
"label": "Tools"
+ },
+ {
+ "type": "boolean",
+ "key": "active",
+ "label": "Active Project"
}
]
}
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
index 4b91072eb6..c50f383f02 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json
@@ -24,13 +24,22 @@
}
]
},
+ {
+ "type": "schema_template",
+ "name": "template_publish_plugin",
+ "template_data": [
+ {
+ "key": "ValidateVersion",
+ "label": "Validate Version"
+ }
+ ]
+ },
{
"type": "dict",
- "collapsible": true,
- "checkbox_key": "enabled",
- "key": "ValidateVersion",
- "label": "Validate Version",
+ "label": "Validate Intent",
+ "key": "ValidateIntent",
"is_group": true,
+ "checkbox_key": "enabled",
"children": [
{
"type": "boolean",
@@ -38,9 +47,43 @@
"label": "Enabled"
},
{
- "type": "boolean",
- "key": "optional",
- "label": "Optional"
+ "type": "label",
+        "label": "Validate if a publishing intent was selected. Validation can be disabled for specific publishing contexts using profiles."
+ },
+ {
+ "type": "list",
+ "collapsible": true,
+ "key": "profiles",
+ "object_type": {
+ "type": "dict",
+ "children": [
+ {
+ "key": "hosts",
+ "label": "Host names",
+ "type": "hosts-enum",
+ "multiselection": true
+ },
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
+ {
+ "key": "tasks",
+ "label": "Task names",
+ "type": "list",
+ "object_type": "text"
+ },
+ {
+ "type": "separator"
+ },
+ {
+ "key": "validate",
+ "label": "Validate",
+ "type": "boolean"
+ }
+ ]
+ }
}
]
},
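
For reference, a sketch of a single ValidateIntent profile as it would end up in the settings overrides; the host and task type values are placeholders, and an empty list in a filter field typically means that filter is not applied:

    {
        "hosts": ["nuke"],              # placeholder
        "task_types": ["Compositing"],  # placeholder
        "tasks": [],                    # empty -> matches any task name
        "validate": True
    }
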
@@ -502,6 +545,11 @@
"label": "Hosts",
"multiselection": true
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
"label": "Task names",
@@ -543,6 +591,11 @@
"label": "Hosts",
"multiselection": true
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
"label": "Task names",
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json
index 245560f115..26d3771d8a 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json
@@ -40,6 +40,11 @@
"label": "Hosts",
"multiselection": true
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
"label": "Task names",
@@ -126,9 +131,14 @@
"unreal"
]
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
- "label": "Tasks",
+ "label": "Task names",
"type": "list",
"object_type": "text"
},
@@ -161,9 +171,15 @@
"nuke"
]
},
+ {
+ "key": "task_types",
+ "label": "Task types",
+        "type": "task-types-enum"
+ },
{
"key": "tasks",
- "label": "Tasks",
+ "label": "Task names",
"type": "list",
"object_type": "text"
},
@@ -190,6 +206,48 @@
}
}
]
+ },
+ {
+ "type": "dict",
+ "collapsible": true,
+ "key": "loader",
+ "label": "Loader",
+ "children": [
+ {
+ "type": "list",
+ "key": "family_filter_profiles",
+ "label": "Family filtering",
+ "use_label_wrap": true,
+ "object_type": {
+ "type": "dict",
+ "children": [
+ {
+ "type": "hosts-enum",
+ "key": "hosts",
+ "label": "Hosts",
+ "multiselection": true
+ },
+ {
+ "type": "task-types-enum",
+ "key": "task_types",
+ "label": "Task types"
+ },
+ {
+ "type": "splitter"
+ },
+ {
+ "type": "template",
+ "name": "template_publish_families",
+ "template_data": {
+ "key": "filter_families",
+ "label": "Filter families",
+ "multiselection": true
+ }
+ }
+ ]
+ }
+ }
+ ]
}
]
}
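
A sketch of one family filtering profile for the new Loader section; the family names are a subset of template_publish_families.json and the host / task type values are placeholders:

    {
        "hosts": ["maya"],           # placeholder
        "task_types": ["Modeling"],  # placeholder
        "filter_families": ["model", "rig", "look"]
    }
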
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json
index 0b09d08700..7c87644817 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_load.json
@@ -47,9 +47,14 @@
},
{
"type": "color",
- "label": "Maya Scene:",
+ "label": "Maya Ascii:",
"key": "mayaAscii"
},
+ {
+ "type": "color",
+ "label": "Maya Scene:",
+ "key": "mayaScene"
+ },
{
"type": "color",
"label": "Set Dress:",
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json
index 89cd30aed0..26ebfb2bd7 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json
@@ -28,6 +28,16 @@
"type": "label",
"label": "Validators"
},
+ {
+ "type": "schema_template",
+ "name": "template_publish_plugin",
+ "template_data": [
+ {
+ "key": "ValidateInstanceInContext",
+ "label": "Validate Instance In Context"
+ }
+ ]
+ },
{
"type": "schema_template",
"name": "template_publish_plugin",
@@ -82,6 +92,32 @@
]
},
+ {
+ "type": "dict",
+ "collapsible": true,
+ "key": "ValidateLoadedPlugin",
+ "label": "Validate Loaded Plugin",
+ "checkbox_key": "enabled",
+ "children": [
+ {
+ "type": "boolean",
+ "key": "enabled",
+ "label": "Enabled"
+ },
+ {
+ "type": "boolean",
+ "key": "whitelist_native_plugins",
+ "label": "Whitelist Maya Native Plugins"
+ },
+ {
+ "type": "list",
+ "key": "authorized_plugins",
+ "label": "Authorized plugins",
+ "object_type": "text"
+ }
+ ]
+ },
+
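
The defaults for the ValidateLoadedPlugin block above are not part of this hunk; a plausible shape for its project-settings entry, with the default values being assumptions only:

    {
        "ValidateLoadedPlugin": {
            "enabled": False,                   # assumed default
            "whitelist_native_plugins": False,  # assumed default
            "authorized_plugins": []
        }
    }
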
{
"type": "dict",
"collapsible": true,
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
index 782179cfd1..74b2592d29 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
@@ -16,9 +16,30 @@
"is_group": true,
"children": [
{
- "type": "boolean",
- "key": "sync_workfile_version",
- "label": "Sync Version from workfile"
+ "type": "enum",
+ "key": "sync_workfile_version_on_families",
+ "label": "Sync workfile version for families",
+ "multiselection": true,
+ "enum_items": [
+ {
+ "nukenodes": "nukenodes"
+ },
+ {
+ "camera": "camera"
+ },
+ {
+ "gizmo": "gizmo"
+ },
+ {
+ "source": "source"
+ },
+ {
+ "prerender": "prerender"
+ },
+ {
+ "render": "render"
+ }
+ ]
}
]
},
@@ -29,6 +50,16 @@
"type": "label",
"label": "Validators"
},
+ {
+ "type": "schema_template",
+ "name": "template_publish_plugin",
+ "template_data": [
+ {
+ "key": "ValidateInstanceInContext",
+ "label": "Validate Instance In Context"
+ }
+ ]
+ },
{
"type": "schema_template",
"name": "template_publish_plugin",
@@ -152,6 +183,38 @@
"label": "Viewer LUT raw"
}
]
+ },
+ {
+ "type": "splitter"
+ },
+ {
+ "type": "label",
+ "label": "Integrators"
+ },
+ {
+ "type": "dict",
+ "collapsible": true,
+ "checkbox_key": "enabled",
+ "key": "IncrementScriptVersion",
+ "label": "IncrementScriptVersion",
+ "is_group": true,
+ "children": [
+ {
+ "type": "boolean",
+ "key": "enabled",
+ "label": "Enabled"
+ },
+ {
+ "type": "boolean",
+ "key": "optional",
+ "label": "Optional"
+ },
+ {
+ "type": "boolean",
+ "key": "active",
+ "label": "Active"
+ }
+ ]
}
]
}
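
The defaults matching the new Nuke blocks: the IncrementScriptVersion defaults appear in the nuke.json hunk near the top of this diff, while the family list for the workfile-version sync is shown here only as an assumption:

    {
        "IncrementScriptVersion": {
            "enabled": True,
            "optional": True,
            "active": True
        },
        # Assumed example value; the shipped default is not shown in this diff.
        "sync_workfile_version_on_families": ["render", "prerender"]
    }
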
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_workfile_build.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_workfile_build.json
index 078bb81bba..2a3f0ae136 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_workfile_build.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_workfile_build.json
@@ -11,9 +11,14 @@
"object_type": {
"type": "dict",
"children": [
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
- "label": "Tasks",
+ "label": "Task names",
"type": "list",
"object_type": "text"
},
@@ -94,4 +99,4 @@
}
}
]
-}
\ No newline at end of file
+}
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json
new file mode 100644
index 0000000000..9db1427562
--- /dev/null
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json
@@ -0,0 +1,32 @@
+[
+ {
+ "__default_values__": {
+ "multiselection": true
+ }
+ },
+ {
+ "key": "{key}",
+ "label": "{label}",
+ "multiselection": "{multiselection}",
+ "type": "enum",
+ "enum_items": [
+ {"action": "action"},
+ {"animation": "animation"},
+ {"audio": "audio"},
+ {"camera": "camera"},
+ {"editorial": "editorial"},
+ {"layout": "layout"},
+ {"look": "look"},
+ {"mayaAscii": "mayaAscii"},
+ {"model": "model"},
+ {"pointcache": "pointcache"},
+ {"reference": "reference"},
+ {"render": "render"},
+ {"review": "review"},
+ {"rig": "rig"},
+ {"setdress": "setdress"},
+ {"workfile": "workfile"},
+ {"xgen": "xgen"}
+ ]
+ }
+]
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_workfile_options.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_workfile_options.json
index 815df85879..90fc4fbdd0 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/template_workfile_options.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_workfile_options.json
@@ -55,9 +55,14 @@
"object_type": {
"type": "dict",
"children": [
+ {
+ "key": "task_types",
+ "label": "Task types",
+ "type": "task-types-enum"
+ },
{
"key": "tasks",
- "label": "Tasks",
+ "label": "Task names",
"type": "list",
"object_type": "text"
},
diff --git a/openpype/settings/entities/schemas/system_schema/schema_general.json b/openpype/settings/entities/schemas/system_schema/schema_general.json
index fe5a8d8203..31cd997d14 100644
--- a/openpype/settings/entities/schemas/system_schema/schema_general.json
+++ b/openpype/settings/entities/schemas/system_schema/schema_general.json
@@ -40,6 +40,75 @@
{
"type": "splitter"
},
+ {
+ "type": "dict",
+ "key": "disk_mapping",
+ "label": "Disk mapping",
+ "use_label_wrap": false,
+ "collapsible": false,
+ "children": [
+ {
+ "key": "windows",
+ "label": "Windows",
+ "type": "list",
+ "object_type": {
+ "type": "list-strict",
+ "key": "item",
+ "object_types": [
+ {
+ "label": "Source",
+ "type": "path"
+ },
+ {
+ "label": "Destination",
+ "type": "path"
+ }
+ ]
+ }
+ },
+ {
+ "key": "linux",
+ "label": "Linux",
+ "type": "list",
+ "object_type": {
+ "type": "list-strict",
+ "key": "item",
+ "object_types": [
+ {
+ "label": "Source",
+ "type": "path"
+ },
+ {
+ "label": "Destination",
+ "type": "path"
+ }
+ ]
+ }
+ },
+ {
+ "key": "darwin",
+ "label": "MacOS",
+ "type": "list",
+ "object_type": {
+ "type": "list-strict",
+ "key": "item",
+ "object_types": [
+ {
+ "label": "Source",
+ "type": "path"
+ },
+ {
+ "label": "Destination",
+ "type": "path"
+ }
+ ]
+ }
+ }
+ ]
+ },
+ {
+ "type": "splitter"
+ },
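
Each disk mapping item is a strict [source, destination] pair per platform, and handlers.py below adds the key to the globally shared general settings. A sketch with placeholder paths:

    {
        "disk_mapping": {
            "windows": [
                ["P:/", "//storage/projects/"]  # placeholder source/destination
            ],
            "linux": [],
            "darwin": []
        }
    }
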
{
"type": "path",
"key": "openpype_path",
diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json
index 31d8e04731..a2b31772e9 100644
--- a/openpype/settings/entities/schemas/system_schema/schema_modules.json
+++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json
@@ -28,7 +28,8 @@
"type": "number",
"key": "AVALON_TIMEOUT",
"minimum": 0,
- "label": "Avalon Mongo Timeout (ms)"
+ "label": "Avalon Mongo Timeout (ms)",
+ "steps": 100
},
{
"type": "path",
@@ -121,14 +122,7 @@
"collapsible_key": false,
"object_type":
{
- "type": "dict",
- "children": [
- {
- "type": "providers-enum",
- "key": "provider",
- "label": "Provider"
- }
- ]
+ "type": "sync-server-providers"
}
}
]
@@ -242,6 +236,10 @@
"label": "Enabled"
}
]
+ },
+ {
+ "type": "dynamic_schema",
+ "name": "system_settings/modules"
}
]
}
diff --git a/openpype/settings/handlers.py b/openpype/settings/handlers.py
index 288fc76801..c59e2bc542 100644
--- a/openpype/settings/handlers.py
+++ b/openpype/settings/handlers.py
@@ -168,7 +168,7 @@ class CacheValues:
class MongoSettingsHandler(SettingsHandler):
"""Settings handler that use mongo for storing and loading of settings."""
- global_general_keys = ("openpype_path", "admin_password")
+ global_general_keys = ("openpype_path", "admin_password", "disk_mapping")
def __init__(self):
# Get mongo connection
diff --git a/openpype/style/__init__.py b/openpype/style/__init__.py
index 87547b1a90..0d7904d133 100644
--- a/openpype/style/__init__.py
+++ b/openpype/style/__init__.py
@@ -91,4 +91,4 @@ def load_stylesheet():
def app_icon_path():
- return resources.pype_icon_filepath()
+ return resources.get_openpype_icon_filepath()
diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py
index 14c6aff4ad..4d86970f9c 100644
--- a/openpype/tools/launcher/actions.py
+++ b/openpype/tools/launcher/actions.py
@@ -84,7 +84,7 @@ class ApplicationAction(api.Action):
def _show_message_box(self, title, message, details=None):
dialog = QtWidgets.QMessageBox()
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
dialog.setWindowIcon(icon)
dialog.setStyleSheet(style.load_stylesheet())
dialog.setWindowTitle(title)
diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py
index 4988829c11..f87871409e 100644
--- a/openpype/tools/launcher/models.py
+++ b/openpype/tools/launcher/models.py
@@ -326,8 +326,6 @@ class ProjectModel(QtGui.QStandardItemModel):
super(ProjectModel, self).__init__(parent=parent)
self.dbcon = dbcon
-
- self.hide_invisible = False
self.project_icon = qtawesome.icon("fa.map", color="white")
self._project_names = set()
@@ -380,16 +378,5 @@ class ProjectModel(QtGui.QStandardItemModel):
self.invisibleRootItem().insertRows(row, items)
def get_projects(self):
- project_docs = []
-
- for project_doc in sorted(
- self.dbcon.projects(), key=lambda x: x["name"]
- ):
- if (
- self.hide_invisible
- and not project_doc["data"].get("visible", True)
- ):
- continue
- project_docs.append(project_doc)
-
- return project_docs
+ return sorted(self.dbcon.projects(only_active=True),
+ key=lambda x: x["name"])
diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py
index bd37a9b89c..9b839fb2bc 100644
--- a/openpype/tools/launcher/window.py
+++ b/openpype/tools/launcher/window.py
@@ -261,7 +261,7 @@ class LauncherWindow(QtWidgets.QDialog):
self.setFocusPolicy(QtCore.Qt.StrongFocus)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose, False)
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
self.setStyleSheet(style.load_stylesheet())
@@ -271,7 +271,6 @@ class LauncherWindow(QtWidgets.QDialog):
)
project_model = ProjectModel(self.dbcon)
- project_model.hide_invisible = True
project_handler = ProjectHandler(self.dbcon, project_model)
project_panel = ProjectsPanel(project_handler)
diff --git a/openpype/tools/libraryloader/__init__.py b/openpype/tools/libraryloader/__init__.py
new file mode 100644
index 0000000000..bbf4a1087d
--- /dev/null
+++ b/openpype/tools/libraryloader/__init__.py
@@ -0,0 +1,11 @@
+from .app import (
+ LibraryLoaderWindow,
+ show,
+ cli
+)
+
+__all__ = [
+ "LibraryLoaderWindow",
+ "show",
+ "cli",
+]
diff --git a/openpype/tools/libraryloader/__main__.py b/openpype/tools/libraryloader/__main__.py
new file mode 100644
index 0000000000..d77bc585c5
--- /dev/null
+++ b/openpype/tools/libraryloader/__main__.py
@@ -0,0 +1,5 @@
+from . import cli
+
+if __name__ == '__main__':
+ import sys
+ sys.exit(cli(sys.argv[1:]))
diff --git a/openpype/tools/libraryloader/app.py b/openpype/tools/libraryloader/app.py
new file mode 100644
index 0000000000..8080c547c9
--- /dev/null
+++ b/openpype/tools/libraryloader/app.py
@@ -0,0 +1,589 @@
+import sys
+
+from Qt import QtWidgets, QtCore, QtGui
+
+from avalon import style
+from avalon.api import AvalonMongoDB
+from openpype.tools.utils import lib as tools_lib
+from openpype.tools.loader.widgets import (
+ ThumbnailWidget,
+ VersionWidget,
+ FamilyListView,
+ RepresentationWidget
+)
+from openpype.tools.utils.widgets import AssetWidget
+
+from openpype.modules import ModulesManager
+
+from . import lib
+from .widgets import LibrarySubsetWidget
+
+module = sys.modules[__name__]
+module.window = None
+
+
+class LibraryLoaderWindow(QtWidgets.QDialog):
+ """Asset library loader interface"""
+
+ tool_title = "Library Loader 0.5"
+ tool_name = "library_loader"
+
+ def __init__(
+ self, parent=None, icon=None, show_projects=False, show_libraries=True
+ ):
+ super(LibraryLoaderWindow, self).__init__(parent)
+
+ self._initial_refresh = False
+ self._ignore_project_change = False
+
+ # Enable minimize and maximize for app
+ self.setWindowTitle(self.tool_title)
+ self.setWindowFlags(QtCore.Qt.Window)
+ self.setFocusPolicy(QtCore.Qt.StrongFocus)
+ if icon is not None:
+ self.setWindowIcon(icon)
+ # self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
+
+ body = QtWidgets.QWidget()
+ footer = QtWidgets.QWidget()
+ footer.setFixedHeight(20)
+
+ container = QtWidgets.QWidget()
+
+ self.dbcon = AvalonMongoDB()
+ self.dbcon.install()
+ self.dbcon.Session["AVALON_PROJECT"] = None
+
+ self.show_projects = show_projects
+ self.show_libraries = show_libraries
+
+ # Groups config
+ self.groups_config = tools_lib.GroupsConfig(self.dbcon)
+ self.family_config_cache = tools_lib.FamilyConfigCache(self.dbcon)
+
+ assets = AssetWidget(
+ self.dbcon, multiselection=True, parent=self
+ )
+ families = FamilyListView(
+ self.dbcon, self.family_config_cache, parent=self
+ )
+ subsets = LibrarySubsetWidget(
+ self.dbcon,
+ self.groups_config,
+ self.family_config_cache,
+ tool_name=self.tool_name,
+ parent=self
+ )
+
+ version = VersionWidget(self.dbcon)
+ thumbnail = ThumbnailWidget(self.dbcon)
+
+ # Project
+ self.combo_projects = QtWidgets.QComboBox()
+
+ # Create splitter to show / hide family filters
+ asset_filter_splitter = QtWidgets.QSplitter()
+ asset_filter_splitter.setOrientation(QtCore.Qt.Vertical)
+ asset_filter_splitter.addWidget(self.combo_projects)
+ asset_filter_splitter.addWidget(assets)
+ asset_filter_splitter.addWidget(families)
+ asset_filter_splitter.setStretchFactor(1, 65)
+ asset_filter_splitter.setStretchFactor(2, 35)
+
+ manager = ModulesManager()
+ sync_server = manager.modules_by_name["sync_server"]
+
+ representations = RepresentationWidget(self.dbcon)
+ thumb_ver_splitter = QtWidgets.QSplitter()
+ thumb_ver_splitter.setOrientation(QtCore.Qt.Vertical)
+ thumb_ver_splitter.addWidget(thumbnail)
+ thumb_ver_splitter.addWidget(version)
+ if sync_server.enabled:
+ thumb_ver_splitter.addWidget(representations)
+ thumb_ver_splitter.setStretchFactor(0, 30)
+ thumb_ver_splitter.setStretchFactor(1, 35)
+
+ container_layout = QtWidgets.QHBoxLayout(container)
+ container_layout.setContentsMargins(0, 0, 0, 0)
+ split = QtWidgets.QSplitter()
+ split.addWidget(asset_filter_splitter)
+ split.addWidget(subsets)
+ split.addWidget(thumb_ver_splitter)
+ split.setSizes([180, 950, 200])
+ container_layout.addWidget(split)
+
+ body_layout = QtWidgets.QHBoxLayout(body)
+ body_layout.addWidget(container)
+ body_layout.setContentsMargins(0, 0, 0, 0)
+
+ message = QtWidgets.QLabel()
+ message.hide()
+
+ footer_layout = QtWidgets.QVBoxLayout(footer)
+ footer_layout.addWidget(message)
+ footer_layout.setContentsMargins(0, 0, 0, 0)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(body)
+ layout.addWidget(footer)
+
+ self.data = {
+ "widgets": {
+ "families": families,
+ "assets": assets,
+ "subsets": subsets,
+ "version": version,
+ "thumbnail": thumbnail,
+ "representations": representations
+ },
+ "label": {
+ "message": message,
+ },
+ "state": {
+ "assetIds": None
+ }
+ }
+
+ families.active_changed.connect(subsets.set_family_filters)
+ assets.selection_changed.connect(self.on_assetschanged)
+ assets.refresh_triggered.connect(self.on_assetschanged)
+ assets.view.clicked.connect(self.on_assetview_click)
+ subsets.active_changed.connect(self.on_subsetschanged)
+ subsets.version_changed.connect(self.on_versionschanged)
+ subsets.refreshed.connect(self._on_subset_refresh)
+ self.combo_projects.currentTextChanged.connect(self.on_project_change)
+
+ self.sync_server = sync_server
+
+ # Set default thumbnail on start
+ thumbnail.set_thumbnail(None)
+
+ # Defaults
+ if sync_server.enabled:
+ split.setSizes([250, 1000, 550])
+ self.resize(1800, 900)
+ else:
+ split.setSizes([250, 850, 200])
+ self.resize(1300, 700)
+
+ def showEvent(self, event):
+ super(LibraryLoaderWindow, self).showEvent(event)
+ if not self._initial_refresh:
+ self.refresh()
+
+ def on_assetview_click(self, *args):
+ subsets_widget = self.data["widgets"]["subsets"]
+ selection_model = subsets_widget.view.selectionModel()
+ if selection_model.selectedIndexes():
+ selection_model.clearSelection()
+
+ def _set_projects(self):
+ # Store current project
+ old_project_name = self.current_project
+
+ self._ignore_project_change = True
+
+ # Cleanup
+ self.combo_projects.clear()
+
+ # Fill combobox with projects
+ select_project_item = QtGui.QStandardItem("< Select project >")
+ select_project_item.setData(None, QtCore.Qt.UserRole + 1)
+
+ combobox_items = [select_project_item]
+
+ project_names = self.get_filtered_projects()
+
+ for project_name in sorted(project_names):
+ item = QtGui.QStandardItem(project_name)
+ item.setData(project_name, QtCore.Qt.UserRole + 1)
+ combobox_items.append(item)
+
+ root_item = self.combo_projects.model().invisibleRootItem()
+ root_item.appendRows(combobox_items)
+
+ index = 0
+ self._ignore_project_change = False
+
+ if old_project_name:
+ index = self.combo_projects.findText(
+ old_project_name, QtCore.Qt.MatchFixedString
+ )
+
+ self.combo_projects.setCurrentIndex(index)
+
+ def get_filtered_projects(self):
+ projects = list()
+ for project in self.dbcon.projects():
+ is_library = project.get("data", {}).get("library_project", False)
+ if (
+ (is_library and self.show_libraries) or
+ (not is_library and self.show_projects)
+ ):
+ projects.append(project["name"])
+
+ return projects
+
+ def on_project_change(self):
+ if self._ignore_project_change:
+ return
+
+ row = self.combo_projects.currentIndex()
+ index = self.combo_projects.model().index(row, 0)
+ project_name = index.data(QtCore.Qt.UserRole + 1)
+
+ self.dbcon.Session["AVALON_PROJECT"] = project_name
+
+ _config = lib.find_config()
+ if hasattr(_config, "install"):
+ _config.install()
+ else:
+ print(
+ "Config `%s` has no function `install`" % _config.__name__
+ )
+
+ subsets = self.data["widgets"]["subsets"]
+ representations = self.data["widgets"]["representations"]
+
+ subsets.on_project_change(self.dbcon.Session["AVALON_PROJECT"])
+ representations.on_project_change(self.dbcon.Session["AVALON_PROJECT"])
+
+ self.family_config_cache.refresh()
+ self.groups_config.refresh()
+
+ self._refresh_assets()
+ self._assetschanged()
+
+ project_name = self.dbcon.active_project() or "No project selected"
+ title = "{} - {}".format(self.tool_title, project_name)
+ self.setWindowTitle(title)
+
+ @property
+ def current_project(self):
+ if (
+ not self.dbcon.active_project() or
+ self.dbcon.active_project() == ""
+ ):
+ return None
+
+ return self.dbcon.active_project()
+
+ # -------------------------------
+ # Delay calling blocking methods
+ # -------------------------------
+
+ def refresh(self):
+ self.echo("Fetching results..")
+ tools_lib.schedule(self._refresh, 50, channel="mongo")
+
+ def on_assetschanged(self, *args):
+ self.echo("Fetching asset..")
+ tools_lib.schedule(self._assetschanged, 50, channel="mongo")
+
+ def on_subsetschanged(self, *args):
+ self.echo("Fetching subset..")
+ tools_lib.schedule(self._subsetschanged, 50, channel="mongo")
+
+ def on_versionschanged(self, *args):
+ self.echo("Fetching version..")
+ tools_lib.schedule(self._versionschanged, 150, channel="mongo")
+
+ def _on_subset_refresh(self, has_item):
+ subsets_widget = self.data["widgets"]["subsets"]
+ families_view = self.data["widgets"]["families"]
+
+ subsets_widget.set_loading_state(loading=False, empty=not has_item)
+ families = subsets_widget.get_subsets_families()
+ families_view.set_enabled_families(families)
+
+ def set_context(self, context, refresh=True):
+ self.echo("Setting context: {}".format(context))
+        tools_lib.schedule(
+ lambda: self._set_context(context, refresh=refresh),
+ 50, channel="mongo"
+ )
+
+ # ------------------------------
+ def _refresh(self):
+ if not self._initial_refresh:
+ self._initial_refresh = True
+ self._set_projects()
+
+ def _refresh_assets(self):
+ """Load assets from database"""
+ if self.current_project is not None:
+ # Ensure a project is loaded
+ project_doc = self.dbcon.find_one(
+ {"type": "project"},
+ {"type": 1}
+ )
+ assert project_doc, "This is a bug"
+
+ assets_widget = self.data["widgets"]["assets"]
+ families_view = self.data["widgets"]["families"]
+ families_view.set_enabled_families(set())
+ families_view.refresh()
+
+ assets_widget.model.stop_fetch_thread()
+ assets_widget.refresh()
+ assets_widget.setFocus()
+
+ def clear_assets_underlines(self):
+ last_asset_ids = self.data["state"]["assetIds"]
+ if not last_asset_ids:
+ return
+
+ assets_widget = self.data["widgets"]["assets"]
+ id_role = assets_widget.model.ObjectIdRole
+
+ for index in tools_lib.iter_model_rows(assets_widget.model, 0):
+ if index.data(id_role) not in last_asset_ids:
+ continue
+
+ assets_widget.model.setData(
+ index, [], assets_widget.model.subsetColorsRole
+ )
+
+ def _assetschanged(self):
+ """Selected assets have changed"""
+ assets_widget = self.data["widgets"]["assets"]
+ subsets_widget = self.data["widgets"]["subsets"]
+ subsets_model = subsets_widget.model
+
+ subsets_model.clear()
+ self.clear_assets_underlines()
+
+ if not self.dbcon.Session.get("AVALON_PROJECT"):
+ subsets_widget.set_loading_state(
+ loading=False,
+ empty=True
+ )
+ return
+
+        # Filter out None docs, they are silos
+ asset_docs = assets_widget.get_selected_assets()
+ if len(asset_docs) == 0:
+ return
+
+ asset_ids = [asset_doc["_id"] for asset_doc in asset_docs]
+ # Start loading
+ subsets_widget.set_loading_state(
+ loading=bool(asset_ids),
+ empty=True
+ )
+
+ subsets_model.set_assets(asset_ids)
+ subsets_widget.view.setColumnHidden(
+ subsets_model.Columns.index("asset"),
+ len(asset_ids) < 2
+ )
+
+ # Clear the version information on asset change
+ self.data["widgets"]["version"].set_version(None)
+ self.data["widgets"]["thumbnail"].set_thumbnail(asset_docs)
+
+ self.data["state"]["assetIds"] = asset_ids
+
+ representations = self.data["widgets"]["representations"]
+ # reset repre list
+ representations.set_version_ids([])
+
+ def _subsetschanged(self):
+ asset_ids = self.data["state"]["assetIds"]
+ # Skip setting colors if not asset multiselection
+ if not asset_ids or len(asset_ids) < 2:
+ self._versionschanged()
+ return
+
+ subsets = self.data["widgets"]["subsets"]
+ selected_subsets = subsets.selected_subsets(_merged=True, _other=False)
+
+ asset_models = {}
+ asset_ids = []
+ for subset_node in selected_subsets:
+ asset_ids.extend(subset_node.get("assetIds", []))
+ asset_ids = set(asset_ids)
+
+ for subset_node in selected_subsets:
+ for asset_id in asset_ids:
+ if asset_id not in asset_models:
+ asset_models[asset_id] = []
+
+ color = None
+ if asset_id in subset_node.get("assetIds", []):
+ color = subset_node["subsetColor"]
+
+ asset_models[asset_id].append(color)
+
+ self.clear_assets_underlines()
+
+ assets_widget = self.data["widgets"]["assets"]
+ indexes = assets_widget.view.selectionModel().selectedRows()
+
+ for index in indexes:
+ id = index.data(assets_widget.model.ObjectIdRole)
+ if id not in asset_models:
+ continue
+
+ assets_widget.model.setData(
+ index, asset_models[id], assets_widget.model.subsetColorsRole
+ )
+ # Trigger repaint
+ assets_widget.view.updateGeometries()
+ # Set version in Version Widget
+ self._versionschanged()
+
+ def _versionschanged(self):
+
+ subsets = self.data["widgets"]["subsets"]
+ selection = subsets.view.selectionModel()
+
+ # Active must be in the selected rows otherwise we
+ # assume it's not actually an "active" current index.
+ version_docs = None
+ version_doc = None
+ active = selection.currentIndex()
+ rows = selection.selectedRows(column=active.column())
+ if active and active in rows:
+ item = active.data(subsets.model.ItemRole)
+ if (
+ item is not None
+ and not (item.get("isGroup") or item.get("isMerged"))
+ ):
+ version_doc = item["version_document"]
+
+ if rows:
+ version_docs = []
+ for index in rows:
+ if not index or not index.isValid():
+ continue
+ item = index.data(subsets.model.ItemRole)
+ if (
+ item is None
+ or item.get("isGroup")
+ or item.get("isMerged")
+ ):
+ continue
+ version_docs.append(item["version_document"])
+
+ self.data["widgets"]["version"].set_version(version_doc)
+
+ thumbnail_docs = version_docs
+ if not thumbnail_docs:
+ assets_widget = self.data["widgets"]["assets"]
+ asset_docs = assets_widget.get_selected_assets()
+ if len(asset_docs) > 0:
+ thumbnail_docs = asset_docs
+
+ self.data["widgets"]["thumbnail"].set_thumbnail(thumbnail_docs)
+
+ representations = self.data["widgets"]["representations"]
+ version_ids = [doc["_id"] for doc in version_docs or []]
+ representations.set_version_ids(version_ids)
+
+ def _set_context(self, context, refresh=True):
+ """Set the selection in the interface using a context.
+ The context must contain `asset` data by name.
+ Note: Prior to setting context ensure `refresh` is triggered so that
+ the "silos" are listed correctly, aside from that setting the
+ context will force a refresh further down because it changes
+ the active silo and asset.
+ Args:
+ context (dict): The context to apply.
+ Returns:
+ None
+ """
+
+ asset = context.get("asset", None)
+ if asset is None:
+ return
+
+ if refresh:
+ # Workaround:
+ # Force a direct (non-scheduled) refresh prior to setting the
+ # asset widget's silo and asset selection to ensure it's correctly
+        #   displaying the silo tabs. When `window.refresh()` is called and
+        #   `window.set_context()` right after it, `set_context()` seems to
+        #   override the scheduled refresh and the silo tabs are not shown.
+ self._refresh_assets()
+
+ asset_widget = self.data["widgets"]["assets"]
+ asset_widget.select_assets(asset)
+
+ def echo(self, message):
+ widget = self.data["label"]["message"]
+ widget.setText(str(message))
+ widget.show()
+ print(message)
+
+ tools_lib.schedule(widget.hide, 5000, channel="message")
+
+ def closeEvent(self, event):
+ # Kill on holding SHIFT
+ modifiers = QtWidgets.QApplication.queryKeyboardModifiers()
+ shift_pressed = QtCore.Qt.ShiftModifier & modifiers
+
+ if shift_pressed:
+ print("Force quitted..")
+ self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
+
+ print("Good bye")
+ return super(LibraryLoaderWindow, self).closeEvent(event)
+
+
+def show(
+ debug=False, parent=None, icon=None,
+ show_projects=False, show_libraries=True
+):
+ """Display Loader GUI
+
+ Arguments:
+        debug (bool, optional): Run loader in debug-mode,
+            defaults to False.
+        parent (QtCore.QObject, optional): The Qt object to parent to.
+        icon (QtGui.QIcon, optional): Window icon for the dialog.
+        show_projects (bool, optional): Whether standard projects are listed.
+        show_libraries (bool, optional): Whether library projects are listed.
+
+ """
+ # Remember window
+ if module.window is not None:
+ try:
+ module.window.show()
+
+ # If the window is minimized then unminimize it.
+ if module.window.windowState() & QtCore.Qt.WindowMinimized:
+ module.window.setWindowState(QtCore.Qt.WindowActive)
+
+ # Raise and activate the window
+ module.window.raise_() # for MacOS
+ module.window.activateWindow() # for Windows
+ module.window.refresh()
+ return
+ except RuntimeError as e:
+            if not str(e).rstrip().endswith("already deleted."):
+ raise
+
+ # Garbage collected
+ module.window = None
+
+ if debug:
+ import traceback
+ sys.excepthook = lambda typ, val, tb: traceback.print_last()
+
+ with tools_lib.application():
+ window = LibraryLoaderWindow(
+ parent, icon, show_projects, show_libraries
+ )
+ window.setStyleSheet(style.load_stylesheet())
+ window.show()
+
+ module.window = window
+
+
+def cli(args):
+
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("project")
+
+ show(show_projects=True, show_libraries=True)
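
A minimal way to open the Library Loader outside a host, based on the show() signature above and the package __main__ added earlier; it assumes the usual OpenPype mongo environment variables are already set:

    from openpype.tools import libraryloader

    # List only library projects (the defaults); parent and icon stay None.
    libraryloader.show(show_projects=False, show_libraries=True)

    # Or, from a shell:
    #   python -m openpype.tools.libraryloader
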
diff --git a/openpype/tools/libraryloader/lib.py b/openpype/tools/libraryloader/lib.py
new file mode 100644
index 0000000000..6a497a6a16
--- /dev/null
+++ b/openpype/tools/libraryloader/lib.py
@@ -0,0 +1,33 @@
+import os
+import importlib
+import logging
+from openpype.api import Anatomy
+
+log = logging.getLogger(__name__)
+
+
+# `find_config` from `pipeline`
+def find_config():
+ log.info("Finding configuration for project..")
+
+ config = os.environ["AVALON_CONFIG"]
+
+ if not config:
+ raise EnvironmentError(
+ "No configuration found in "
+ "the project nor environment"
+ )
+
+ log.info("Found %s, loading.." % config)
+ return importlib.import_module(config)
+
+
+class RegisteredRoots:
+ roots_per_project = {}
+
+ @classmethod
+ def registered_root(cls, project_name):
+ if project_name not in cls.roots_per_project:
+ cls.roots_per_project[project_name] = Anatomy(project_name).roots
+
+ return cls.roots_per_project[project_name]
diff --git a/openpype/tools/libraryloader/widgets.py b/openpype/tools/libraryloader/widgets.py
new file mode 100644
index 0000000000..45f9ea2048
--- /dev/null
+++ b/openpype/tools/libraryloader/widgets.py
@@ -0,0 +1,18 @@
+from Qt import QtWidgets
+
+from .lib import RegisteredRoots
+from openpype.tools.loader.widgets import SubsetWidget
+
+
+class LibrarySubsetWidget(SubsetWidget):
+ def on_copy_source(self):
+ """Copy formatted source path to clipboard"""
+ source = self.data.get("source", None)
+ if not source:
+ return
+
+ project_name = self.dbcon.Session["AVALON_PROJECT"]
+ root = RegisteredRoots.registered_root(project_name)
+ path = source.format(root=root)
+ clipboard = QtWidgets.QApplication.clipboard()
+ clipboard.setText(path)
diff --git a/openpype/tools/loader/__init__.py b/openpype/tools/loader/__init__.py
new file mode 100644
index 0000000000..a5fda8f018
--- /dev/null
+++ b/openpype/tools/loader/__init__.py
@@ -0,0 +1,11 @@
+from .app import (
+ LoaderWindow,
+ show,
+ cli,
+)
+
+__all__ = (
+ "LoaderWindow",
+ "show",
+ "cli",
+)
diff --git a/openpype/tools/loader/__main__.py b/openpype/tools/loader/__main__.py
new file mode 100644
index 0000000000..146ba7fd10
--- /dev/null
+++ b/openpype/tools/loader/__main__.py
@@ -0,0 +1,33 @@
+"""Main entrypoint for standalone debugging
+
+    Used for running 'openpype.tools.loader' as a module (-m), useful for
+    debugging without the need to start a host.
+
+    Modify the mongo connection variables below accordingly.
+"""
+import os
+import sys
+from . import cli
+
+
+def my_exception_hook(exctype, value, traceback):
+ # Print the error and traceback
+ print(exctype, value, traceback)
+ # Call the normal Exception hook after
+    sys.__excepthook__(exctype, value, traceback)
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ os.environ["AVALON_MONGO"] = "mongodb://localhost:27017"
+ os.environ["OPENPYPE_MONGO"] = "mongodb://localhost:27017"
+ os.environ["AVALON_DB"] = "avalon"
+ os.environ["AVALON_TIMEOUT"] = "1000"
+ os.environ["OPENPYPE_DEBUG"] = "1"
+ os.environ["AVALON_CONFIG"] = "pype"
+ os.environ["AVALON_ASSET"] = "Jungle"
+
+ # Set the exception hook to our wrapping function
+ sys.excepthook = my_exception_hook
+
+ sys.exit(cli(sys.argv[1:]))
diff --git a/openpype/tools/loader/app.py b/openpype/tools/loader/app.py
new file mode 100644
index 0000000000..c18b6e798a
--- /dev/null
+++ b/openpype/tools/loader/app.py
@@ -0,0 +1,676 @@
+import sys
+
+from Qt import QtWidgets, QtCore
+from avalon import api, io, style, pipeline
+
+from openpype.tools.utils.widgets import AssetWidget
+
+from openpype.tools.utils import lib
+
+from .widgets import (
+ SubsetWidget,
+ VersionWidget,
+ FamilyListView,
+ ThumbnailWidget,
+ RepresentationWidget,
+ OverlayFrame
+)
+
+from openpype.modules import ModulesManager
+
+module = sys.modules[__name__]
+module.window = None
+
+
+# Register callback on task change
+# - callback can't be defined in Window as it is a weak reference callback,
+#   so `WeakSet` would remove it immediately
+def on_context_task_change(*args, **kwargs):
+ if module.window:
+ module.window.on_context_task_change(*args, **kwargs)
+
+
+pipeline.on("taskChanged", on_context_task_change)
+
+
+class LoaderWindow(QtWidgets.QDialog):
+ """Asset loader interface"""
+
+ tool_name = "loader"
+
+ def __init__(self, parent=None):
+ super(LoaderWindow, self).__init__(parent)
+ title = "Asset Loader 2.1"
+ project_name = api.Session.get("AVALON_PROJECT")
+ if project_name:
+ title += " - {}".format(project_name)
+ self.setWindowTitle(title)
+
+ # Groups config
+ self.groups_config = lib.GroupsConfig(io)
+ self.family_config_cache = lib.FamilyConfigCache(io)
+
+ # Enable minimize and maximize for app
+ self.setWindowFlags(QtCore.Qt.Window)
+ self.setFocusPolicy(QtCore.Qt.StrongFocus)
+
+ body = QtWidgets.QWidget()
+ footer = QtWidgets.QWidget()
+ footer.setFixedHeight(20)
+
+ container = QtWidgets.QWidget()
+
+ assets = AssetWidget(io, multiselection=True, parent=self)
+ assets.set_current_asset_btn_visibility(True)
+
+ families = FamilyListView(io, self.family_config_cache, self)
+ subsets = SubsetWidget(
+ io,
+ self.groups_config,
+ self.family_config_cache,
+ tool_name=self.tool_name,
+ parent=self
+ )
+ version = VersionWidget(io)
+ thumbnail = ThumbnailWidget(io)
+ representations = RepresentationWidget(io, self.tool_name)
+
+ manager = ModulesManager()
+ sync_server = manager.modules_by_name["sync_server"]
+
+ thumb_ver_splitter = QtWidgets.QSplitter()
+ thumb_ver_splitter.setOrientation(QtCore.Qt.Vertical)
+ thumb_ver_splitter.addWidget(thumbnail)
+ thumb_ver_splitter.addWidget(version)
+ if sync_server.enabled:
+ thumb_ver_splitter.addWidget(representations)
+ thumb_ver_splitter.setStretchFactor(0, 30)
+ thumb_ver_splitter.setStretchFactor(1, 35)
+
+ # Create splitter to show / hide family filters
+ asset_filter_splitter = QtWidgets.QSplitter()
+ asset_filter_splitter.setOrientation(QtCore.Qt.Vertical)
+ asset_filter_splitter.addWidget(assets)
+ asset_filter_splitter.addWidget(families)
+ asset_filter_splitter.setStretchFactor(0, 65)
+ asset_filter_splitter.setStretchFactor(1, 35)
+
+ container_layout = QtWidgets.QHBoxLayout(container)
+ container_layout.setContentsMargins(0, 0, 0, 0)
+ split = QtWidgets.QSplitter()
+ split.addWidget(asset_filter_splitter)
+ split.addWidget(subsets)
+ split.addWidget(thumb_ver_splitter)
+
+ container_layout.addWidget(split)
+
+ body_layout = QtWidgets.QHBoxLayout(body)
+ body_layout.addWidget(container)
+ body_layout.setContentsMargins(0, 0, 0, 0)
+
+ message = QtWidgets.QLabel()
+ message.hide()
+
+ footer_layout = QtWidgets.QVBoxLayout(footer)
+ footer_layout.addWidget(message)
+ footer_layout.setContentsMargins(0, 0, 0, 0)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(body)
+ layout.addWidget(footer)
+
+ self.data = {
+ "widgets": {
+ "families": families,
+ "assets": assets,
+ "subsets": subsets,
+ "version": version,
+ "thumbnail": thumbnail,
+ "representations": representations
+ },
+ "label": {
+ "message": message,
+ },
+ "state": {
+ "assetIds": None
+ }
+ }
+
+ overlay_frame = OverlayFrame("Loading...", self)
+ overlay_frame.setVisible(False)
+
+ families.active_changed.connect(subsets.set_family_filters)
+ assets.selection_changed.connect(self.on_assetschanged)
+ assets.refresh_triggered.connect(self.on_assetschanged)
+ assets.view.clicked.connect(self.on_assetview_click)
+ subsets.active_changed.connect(self.on_subsetschanged)
+ subsets.version_changed.connect(self.on_versionschanged)
+ subsets.refreshed.connect(self._on_subset_refresh)
+
+ subsets.load_started.connect(self._on_load_start)
+ subsets.load_ended.connect(self._on_load_end)
+ representations.load_started.connect(self._on_load_start)
+ representations.load_ended.connect(self._on_load_end)
+
+ self._overlay_frame = overlay_frame
+
+ self.family_config_cache.refresh()
+ self.groups_config.refresh()
+
+ self._refresh()
+ self._assetschanged()
+
+ # Defaults
+ if sync_server.enabled:
+ split.setSizes([250, 1000, 550])
+ self.resize(1800, 900)
+ else:
+ split.setSizes([250, 850, 200])
+ self.resize(1300, 700)
+
+ def resizeEvent(self, event):
+ super(LoaderWindow, self).resizeEvent(event)
+ self._overlay_frame.resize(self.size())
+
+ def moveEvent(self, event):
+ super(LoaderWindow, self).moveEvent(event)
+ self._overlay_frame.move(0, 0)
+
+ # -------------------------------
+ # Delay calling blocking methods
+ # -------------------------------
+
+ def on_assetview_click(self, *args):
+ subsets_widget = self.data["widgets"]["subsets"]
+ selection_model = subsets_widget.view.selectionModel()
+ if selection_model.selectedIndexes():
+ selection_model.clearSelection()
+
+ def refresh(self):
+ self.echo("Fetching results..")
+ lib.schedule(self._refresh, 50, channel="mongo")
+
+ def on_assetschanged(self, *args):
+ self.echo("Fetching asset..")
+ lib.schedule(self._assetschanged, 50, channel="mongo")
+
+ def on_subsetschanged(self, *args):
+ self.echo("Fetching subset..")
+ lib.schedule(self._subsetschanged, 50, channel="mongo")
+
+ def on_versionschanged(self, *args):
+ self.echo("Fetching version..")
+ lib.schedule(self._versionschanged, 150, channel="mongo")
+
+ def set_context(self, context, refresh=True):
+ self.echo("Setting context: {}".format(context))
+ lib.schedule(lambda: self._set_context(context, refresh=refresh),
+ 50, channel="mongo")
+
+ def _on_load_start(self):
+ # Show overlay and process events so it's repainted
+ self._overlay_frame.setVisible(True)
+ QtWidgets.QApplication.processEvents()
+
+ def _hide_overlay(self):
+ self._overlay_frame.setVisible(False)
+
+ def _on_subset_refresh(self, has_item):
+ subsets_widget = self.data["widgets"]["subsets"]
+ families_view = self.data["widgets"]["families"]
+
+ subsets_widget.set_loading_state(loading=False, empty=not has_item)
+ families = subsets_widget.get_subsets_families()
+ families_view.set_enabled_families(families)
+
+ def _on_load_end(self):
+ # Delay hiding as click events happened during loading should be
+ # blocked
+ QtCore.QTimer.singleShot(100, self._hide_overlay)
+
+ # ------------------------------
+
+ def on_context_task_change(self, *args, **kwargs):
+ assets_widget = self.data["widgets"]["assets"]
+ families_view = self.data["widgets"]["families"]
+ # Refresh families config
+ families_view.refresh()
+ # Change to context asset on context change
+ assets_widget.select_assets(io.Session["AVALON_ASSET"])
+
+ def _refresh(self):
+ """Load assets from database"""
+
+ # Ensure a project is loaded
+ project = io.find_one({"type": "project"}, {"type": 1})
+ assert project, "Project was not found! This is a bug"
+
+ assets_widget = self.data["widgets"]["assets"]
+ assets_widget.refresh()
+ assets_widget.setFocus()
+
+ families_view = self.data["widgets"]["families"]
+ families_view.refresh()
+
+ def clear_assets_underlines(self):
+ """Clear colors from asset data to remove colored underlines
+ When multiple assets are selected colored underlines mark which asset
+ own selected subsets. These colors must be cleared from asset data
+ on selection change so they match current selection.
+ """
+ last_asset_ids = self.data["state"]["assetIds"]
+ if not last_asset_ids:
+ return
+
+ assets_widget = self.data["widgets"]["assets"]
+ id_role = assets_widget.model.ObjectIdRole
+
+ for index in lib.iter_model_rows(assets_widget.model, 0):
+ if index.data(id_role) not in last_asset_ids:
+ continue
+
+ assets_widget.model.setData(
+ index, [], assets_widget.model.subsetColorsRole
+ )
+
+ def _assetschanged(self):
+ """Selected assets have changed"""
+ assets_widget = self.data["widgets"]["assets"]
+ subsets_widget = self.data["widgets"]["subsets"]
+ subsets_model = subsets_widget.model
+
+ subsets_model.clear()
+ self.clear_assets_underlines()
+
+        # Filter out None docs as they are silos
+ asset_docs = assets_widget.get_selected_assets()
+
+ asset_ids = [asset_doc["_id"] for asset_doc in asset_docs]
+ # Start loading
+ subsets_widget.set_loading_state(
+ loading=bool(asset_ids),
+ empty=True
+ )
+
+ subsets_model.set_assets(asset_ids)
+ subsets_widget.view.setColumnHidden(
+ subsets_model.Columns.index("asset"),
+ len(asset_ids) < 2
+ )
+
+ # Clear the version information on asset change
+ self.data["widgets"]["version"].set_version(None)
+ self.data["widgets"]["thumbnail"].set_thumbnail(asset_docs)
+
+ self.data["state"]["assetIds"] = asset_ids
+
+ representations = self.data["widgets"]["representations"]
+ # reset repre list
+ representations.set_version_ids([])
+
+ def _subsetschanged(self):
+ asset_ids = self.data["state"]["assetIds"]
+ # Skip setting colors if not asset multiselection
+ if not asset_ids or len(asset_ids) < 2:
+ self._versionschanged()
+ return
+
+ subsets = self.data["widgets"]["subsets"]
+ selected_subsets = subsets.selected_subsets(_merged=True, _other=False)
+
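+        # Build a per-asset list of subset underline colors: the color of
+        # each selected merged subset, or None when the subset is not
+        # present under that asset.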
+ asset_models = {}
+ asset_ids = []
+ for subset_node in selected_subsets:
+ asset_ids.extend(subset_node.get("assetIds", []))
+ asset_ids = set(asset_ids)
+
+ for subset_node in selected_subsets:
+ for asset_id in asset_ids:
+ if asset_id not in asset_models:
+ asset_models[asset_id] = []
+
+ color = None
+ if asset_id in subset_node.get("assetIds", []):
+ color = subset_node["subsetColor"]
+
+ asset_models[asset_id].append(color)
+
+ self.clear_assets_underlines()
+
+ assets_widget = self.data["widgets"]["assets"]
+ indexes = assets_widget.view.selectionModel().selectedRows()
+
+ for index in indexes:
+            asset_id = index.data(assets_widget.model.ObjectIdRole)
+            if asset_id not in asset_models:
+                continue
+
+            assets_widget.model.setData(
+                index,
+                asset_models[asset_id],
+                assets_widget.model.subsetColorsRole
+ )
+ # Trigger repaint
+ assets_widget.view.updateGeometries()
+ # Set version in Version Widget
+ self._versionschanged()
+
+ def _versionschanged(self):
+ subsets = self.data["widgets"]["subsets"]
+ selection = subsets.view.selectionModel()
+
+ # Active must be in the selected rows otherwise we
+ # assume it's not actually an "active" current index.
+ version_docs = None
+ version_doc = None
+ active = selection.currentIndex()
+ rows = selection.selectedRows(column=active.column())
+ if active:
+ if active in rows:
+ item = active.data(subsets.model.ItemRole)
+ if (
+ item is not None and
+ not (item.get("isGroup") or item.get("isMerged"))
+ ):
+ version_doc = item["version_document"]
+
+ if rows:
+ version_docs = []
+ for index in rows:
+ if not index or not index.isValid():
+ continue
+ item = index.data(subsets.model.ItemRole)
+ if item is None:
+ continue
+ if item.get("isGroup") or item.get("isMerged"):
+ for child in item.children():
+ version_docs.append(child["version_document"])
+ else:
+ version_docs.append(item["version_document"])
+
+ self.data["widgets"]["version"].set_version(version_doc)
+
+ thumbnail_docs = version_docs
+ assets_widget = self.data["widgets"]["assets"]
+ asset_docs = assets_widget.get_selected_assets()
+ if not thumbnail_docs:
+ if len(asset_docs) > 0:
+ thumbnail_docs = asset_docs
+
+ self.data["widgets"]["thumbnail"].set_thumbnail(thumbnail_docs)
+
+ representations = self.data["widgets"]["representations"]
+ version_ids = [doc["_id"] for doc in version_docs or []]
+ representations.set_version_ids(version_ids)
+
+ # representations.change_visibility("subset", len(rows) > 1)
+ # representations.change_visibility("asset", len(asset_docs) > 1)
+
+ def _set_context(self, context, refresh=True):
+ """Set the selection in the interface using a context.
+
+ The context must contain `asset` data by name.
+
+        Note: Prior to setting the context ensure `refresh` is triggered so
+            that the "silos" are listed correctly. Aside from that, setting
+            the context will force a refresh further down because it changes
+            the active silo and asset.
+
+ Args:
+ context (dict): The context to apply.
+
+ Returns:
+ None
+
+ """
+
+ asset = context.get("asset", None)
+ if asset is None:
+ return
+
+ if refresh:
+ # Workaround:
+ # Force a direct (non-scheduled) refresh prior to setting the
+ # asset widget's silo and asset selection to ensure it's correctly
+            # displaying the silo tabs. Calling `window.refresh()` and then
+            # `window.set_context()` directly seems to override the scheduled
+            # refresh, leaving the silo tabs hidden.
+ self._refresh()
+
+ asset_widget = self.data["widgets"]["assets"]
+ asset_widget.select_assets(asset)
+
+ def echo(self, message):
+ widget = self.data["label"]["message"]
+ widget.setText(str(message))
+ widget.show()
+ print(message)
+
+ lib.schedule(widget.hide, 5000, channel="message")
+
+ def closeEvent(self, event):
+ # Kill on holding SHIFT
+ modifiers = QtWidgets.QApplication.queryKeyboardModifiers()
+ shift_pressed = QtCore.Qt.ShiftModifier & modifiers
+
+ if shift_pressed:
+ print("Force quitted..")
+ self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
+
+ print("Good bye")
+ return super(LoaderWindow, self).closeEvent(event)
+
+ def keyPressEvent(self, event):
+ modifiers = event.modifiers()
+ ctrl_pressed = QtCore.Qt.ControlModifier & modifiers
+
+ # Grouping subsets on pressing Ctrl + G
+ if (ctrl_pressed and event.key() == QtCore.Qt.Key_G and
+ not event.isAutoRepeat()):
+ self.show_grouping_dialog()
+ return
+
+ super(LoaderWindow, self).keyPressEvent(event)
+        event.setAccepted(True)  # Avoid interfering with other widgets
+
+ def show_grouping_dialog(self):
+ subsets = self.data["widgets"]["subsets"]
+ if not subsets.is_groupable():
+ self.echo("Grouping not enabled.")
+ return
+
+ selected = []
+ merged_items = []
+ for item in subsets.selected_subsets(_merged=True):
+ if item.get("isMerged"):
+ merged_items.append(item)
+ else:
+ selected.append(item)
+
+ for merged_item in merged_items:
+ for child_item in merged_item.children():
+ selected.append(child_item)
+
+ if not selected:
+ self.echo("No selected subset.")
+ return
+
+ dialog = SubsetGroupingDialog(
+ items=selected, groups_config=self.groups_config, parent=self
+ )
+ dialog.grouped.connect(self._assetschanged)
+ dialog.show()
+
+
+class SubsetGroupingDialog(QtWidgets.QDialog):
+ grouped = QtCore.Signal()
+
+ def __init__(self, items, groups_config, parent=None):
+ super(SubsetGroupingDialog, self).__init__(parent=parent)
+ self.setWindowTitle("Grouping Subsets")
+ self.setMinimumWidth(250)
+ self.setModal(True)
+
+ self.items = items
+ self.groups_config = groups_config
+ self.subsets = parent.data["widgets"]["subsets"]
+ self.asset_ids = parent.data["state"]["assetIds"]
+
+ name = QtWidgets.QLineEdit()
+ name.setPlaceholderText("Remain blank to ungroup..")
+
+ # Menu for pre-defined subset groups
+ name_button = QtWidgets.QPushButton()
+ name_button.setFixedWidth(18)
+ name_button.setFixedHeight(20)
+ name_menu = QtWidgets.QMenu(name_button)
+ name_button.setMenu(name_menu)
+
+ name_layout = QtWidgets.QHBoxLayout()
+ name_layout.addWidget(name)
+ name_layout.addWidget(name_button)
+ name_layout.setContentsMargins(0, 0, 0, 0)
+
+ group_btn = QtWidgets.QPushButton("Apply")
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(QtWidgets.QLabel("Group Name"))
+ layout.addLayout(name_layout)
+ layout.addWidget(group_btn)
+
+ group_btn.clicked.connect(self.on_group)
+ group_btn.setAutoDefault(True)
+ group_btn.setDefault(True)
+
+ self.name = name
+ self.name_menu = name_menu
+
+ self._build_menu()
+
+ def _build_menu(self):
+ menu = self.name_menu
+ button = menu.parent()
+ # Get and destroy the action group
+ group = button.findChild(QtWidgets.QActionGroup)
+ if group:
+ group.deleteLater()
+
+ active_groups = self.groups_config.active_groups(self.asset_ids)
+
+ # Build new action group
+ group = QtWidgets.QActionGroup(button)
+ group_names = list()
+ for data in sorted(active_groups, key=lambda x: x["order"]):
+ name = data["name"]
+ if name in group_names:
+ continue
+ group_names.append(name)
+ icon = data["icon"]
+
+ action = group.addAction(name)
+ action.setIcon(icon)
+ menu.addAction(action)
+
+ group.triggered.connect(self._on_action_clicked)
+ button.setEnabled(not menu.isEmpty())
+
+ def _on_action_clicked(self, action):
+ self.name.setText(action.text())
+
+ def on_group(self):
+ name = self.name.text().strip()
+ self.subsets.group_subsets(name, self.asset_ids, self.items)
+
+ with lib.preserve_selection(tree_view=self.subsets.view,
+ current_index=False):
+ self.grouped.emit()
+ self.close()
+
+
+def show(debug=False, parent=None, use_context=False):
+ """Display Loader GUI
+
+ Arguments:
+ debug (bool, optional): Run loader in debug-mode,
+ defaults to False
+ parent (QtCore.QObject, optional): The Qt object to parent to.
+ use_context (bool): Whether to apply the current context upon launch
+
+ """
+
+ # Remember window
+ if module.window is not None:
+ try:
+ module.window.show()
+
+ # If the window is minimized then unminimize it.
+ if module.window.windowState() & QtCore.Qt.WindowMinimized:
+ module.window.setWindowState(QtCore.Qt.WindowActive)
+
+ # Raise and activate the window
+ module.window.raise_() # for MacOS
+ module.window.activateWindow() # for Windows
+ module.window.refresh()
+ return
+ except (AttributeError, RuntimeError):
+ # Garbage collected
+ module.window = None
+
+ if debug:
+ import traceback
+ sys.excepthook = lambda typ, val, tb: traceback.print_last()
+
+ io.install()
+
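+    # Use the first project that is not explicitly deactivated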
+ any_project = next(
+ project for project in io.projects()
+ if project.get("active", True) is not False
+ )
+
+ api.Session["AVALON_PROJECT"] = any_project["name"]
+ module.project = any_project["name"]
+
+ with lib.application():
+ window = LoaderWindow(parent)
+ window.setStyleSheet(style.load_stylesheet())
+ window.show()
+
+ if use_context:
+ context = {"asset": api.Session["AVALON_ASSET"]}
+ window.set_context(context, refresh=True)
+ else:
+ window.refresh()
+
+ module.window = window
+
+ # Pull window to the front.
+ module.window.raise_()
+ module.window.activateWindow()
+
+
+def cli(args):
+
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("project")
+
+ args = parser.parse_args(args)
+ project = args.project
+
+ print("Entering Project: %s" % project)
+
+ io.install()
+
+ # Store settings
+ api.Session["AVALON_PROJECT"] = project
+
+ from avalon import pipeline
+
+ # Find the set config
+ _config = pipeline.find_config()
+ if hasattr(_config, "install"):
+ _config.install()
+ else:
+ print("Config `%s` has no function `install`" %
+ _config.__name__)
+
+ show()
diff --git a/openpype/tools/loader/images/default_thumbnail.png b/openpype/tools/loader/images/default_thumbnail.png
new file mode 100644
index 0000000000..97bd958e0d
Binary files /dev/null and b/openpype/tools/loader/images/default_thumbnail.png differ
diff --git a/openpype/tools/loader/lib.py b/openpype/tools/loader/lib.py
new file mode 100644
index 0000000000..14ebab6c85
--- /dev/null
+++ b/openpype/tools/loader/lib.py
@@ -0,0 +1,190 @@
+import inspect
+from Qt import QtGui
+
+from avalon.vendor import qtawesome
+from openpype.tools.utils.widgets import (
+ OptionalAction,
+ OptionDialog
+)
+
+
+def change_visibility(model, view, column_name, visible):
+ """
+ Hides or shows particular 'column_name'.
+
+ "asset" and "subset" columns should be visible only in multiselect
+ """
+ index = model.Columns.index(column_name)
+ view.setColumnHidden(index, not visible)
+
+
+def get_selected_items(rows, item_role):
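+    """Return item data for the selected 'rows'.
+
+    Group items are skipped and merged items are expanded to their children.
+    Duplicate items are added only once.
+    """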
+ items = []
+ for row_index in rows:
+ item = row_index.data(item_role)
+ if item.get("isGroup"):
+ continue
+
+ elif item.get("isMerged"):
+ for idx in range(row_index.model().rowCount(row_index)):
+ child_index = row_index.child(idx, 0)
+ item = child_index.data(item_role)
+ if item not in items:
+ items.append(item)
+
+ else:
+ if item not in items:
+ items.append(item)
+ return items
+
+
+def get_options(action, loader, parent, repre_contexts):
+ """Provides dialog to select value from loader provided options.
+
+ Loader can provide static or dynamically created options based on
+ qargparse variants.
+
+    Args:
+        action (OptionalAction): action in menu
+        loader (cls of api.Loader): loader class, not initialized yet
+        parent: Qt element to parent the dialog to
+        repre_contexts (list): list of dicts with full info about
+            selected representations
+    Returns:
+        (dict): selected values from OptionDialog; {} when the loader
+            provides no options
+        None: when the dialog was closed or cancelled
+    """
+ # Pop option dialog
+ options = {}
+ loader_options = loader.get_options(repre_contexts)
+ if getattr(action, "optioned", False) and loader_options:
+ dialog = OptionDialog(parent)
+ dialog.setWindowTitle(action.label + " Options")
+ dialog.create(loader_options)
+
+ if not dialog.exec_():
+ return None
+
+ # Get option
+ options = dialog.parse()
+
+ return options
+
+
+def add_representation_loaders_to_menu(loaders, menu, repre_contexts):
+ """
+    Loops through provided loaders and adds them to 'menu'.
+
+ Expects loaders sorted in requested order.
+ Expects loaders de-duplicated if wanted.
+
+ Args:
+        loaders (list): tuples of (representation, loader)
+        menu (OptionalMenu): menu to add the actions to
+ repre_contexts (dict): full info about representations (contains
+ their repre_doc, asset_doc, subset_doc, version_doc),
+ keys are repre_ids
+
+ Returns:
+ menu (OptionalMenu): with new items
+ """
+ # List the available loaders
+ for representation, loader in loaders:
+ label = None
+ repre_context = None
+ if representation:
+ label = representation.get("custom_label")
+ repre_context = repre_contexts[representation["_id"]]
+
+ if not label:
+ label = get_label_from_loader(loader, representation)
+
+ icon = get_icon_from_loader(loader)
+
+ loader_options = loader.get_options([repre_context])
+
+ use_option = bool(loader_options)
+ action = OptionalAction(label, icon, use_option, menu)
+ if use_option:
+ # Add option box tip
+ action.set_option_tip(loader_options)
+
+ action.setData((representation, loader))
+
+ # Add tooltip and statustip from Loader docstring
+ tip = inspect.getdoc(loader)
+ if tip:
+ action.setToolTip(tip)
+ action.setStatusTip(tip)
+
+ menu.addAction(action)
+
+ return menu
+
+
+def remove_tool_name_from_loaders(available_loaders, tool_name):
+ if not tool_name:
+ return available_loaders
+ filtered_loaders = []
+ for loader in available_loaders:
+ if hasattr(loader, "tool_names"):
+ if not ("*" in loader.tool_names or
+ tool_name in loader.tool_names):
+ continue
+ filtered_loaders.append(loader)
+ return filtered_loaders
+
+
+def get_icon_from_loader(loader):
+ """Pull icon info from loader class"""
+ # Support font-awesome icons using the `.icon` and `.color`
+ # attributes on plug-ins.
+ icon = getattr(loader, "icon", None)
+ if icon is not None:
+ try:
+ key = "fa.{0}".format(icon)
+ color = getattr(loader, "color", "white")
+ icon = qtawesome.icon(key, color=color)
+ except Exception as e:
+ print("Unable to set icon for loader "
+ "{}: {}".format(loader, e))
+ icon = None
+ return icon
+
+
+def get_label_from_loader(loader, representation=None):
+ """Pull label info from loader class"""
+ label = getattr(loader, "label", None)
+ if label is None:
+ label = loader.__name__
+ if representation:
+ # Add the representation as suffix
+ label = "{0} ({1})".format(label, representation['name'])
+ return label
+
+
+def get_no_loader_action(menu, one_item_selected=False):
+ """Creates dummy no loader option in 'menu'"""
+ submsg = "your selection."
+ if one_item_selected:
+ submsg = "this version."
+ msg = "No compatible loaders for {}".format(submsg)
+ print(msg)
+ icon = qtawesome.icon(
+ "fa.exclamation",
+ color=QtGui.QColor(255, 51, 0)
+ )
+ action = OptionalAction(("*" + msg), icon, False, menu)
+ return action
+
+
+def sort_loaders(loaders, custom_sorter=None):
+ def sorter(value):
+ """Sort the Loaders by their order and then their name"""
+ Plugin = value[1]
+ return Plugin.order, Plugin.__name__
+
+ if not custom_sorter:
+ custom_sorter = sorter
+
+ return sorted(loaders, key=custom_sorter)
diff --git a/openpype/tools/loader/model.py b/openpype/tools/loader/model.py
new file mode 100644
index 0000000000..6e9c7bf220
--- /dev/null
+++ b/openpype/tools/loader/model.py
@@ -0,0 +1,1195 @@
+import copy
+import re
+import math
+
+from avalon import (
+ style,
+ schema
+)
+from Qt import QtCore, QtGui
+
+from avalon.vendor import qtawesome
+from avalon.lib import HeroVersionType
+
+from openpype.tools.utils.models import TreeModel, Item
+from openpype.tools.utils import lib
+
+from openpype.modules import ModulesManager
+
+
+def is_filtering_recursible():
+ """Does Qt binding support recursive filtering for QSortFilterProxyModel?
+
+ (NOTE) Recursive filtering was introduced in Qt 5.10.
+
+ """
+ return hasattr(QtCore.QSortFilterProxyModel,
+ "setRecursiveFilteringEnabled")
+
+
+class BaseRepresentationModel(object):
+ """Methods for SyncServer useful in multiple models"""
+
+ def reset_sync_server(self, project_name=None):
+ """Sets/Resets sync server vars after every change (refresh.)"""
+ repre_icons = {}
+ sync_server = None
+ active_site = active_provider = None
+ remote_site = remote_provider = None
+
+ if not project_name:
+ project_name = self.dbcon.Session["AVALON_PROJECT"]
+ else:
+ self.dbcon.Session["AVALON_PROJECT"] = project_name
+
+ if project_name:
+ manager = ModulesManager()
+ sync_server = manager.modules_by_name["sync_server"]
+
+ if project_name in sync_server.get_enabled_projects():
+ active_site = sync_server.get_active_site(project_name)
+ active_provider = sync_server.get_provider_for_site(
+ project_name, active_site)
+ if active_site == 'studio': # for studio use explicit icon
+ active_provider = 'studio'
+
+ remote_site = sync_server.get_remote_site(project_name)
+ remote_provider = sync_server.get_provider_for_site(
+ project_name, remote_site)
+ if remote_site == 'studio': # for studio use explicit icon
+ remote_provider = 'studio'
+
+ repre_icons = lib.get_repre_icons()
+
+ self.repre_icons = repre_icons
+ self.sync_server = sync_server
+ self.active_site = active_site
+ self.active_provider = active_provider
+ self.remote_site = remote_site
+ self.remote_provider = remote_provider
+
+
+class SubsetsModel(TreeModel, BaseRepresentationModel):
+ doc_fetched = QtCore.Signal()
+ refreshed = QtCore.Signal(bool)
+
+ Columns = [
+ "subset",
+ "asset",
+ "family",
+ "version",
+ "time",
+ "author",
+ "frames",
+ "duration",
+ "handles",
+ "step",
+ "repre_info"
+ ]
+
+ column_labels_mapping = {
+ "subset": "Subset",
+ "asset": "Asset",
+ "family": "Family",
+ "version": "Version",
+ "time": "Time",
+ "author": "Author",
+ "frames": "Frames",
+ "duration": "Duration",
+ "handles": "Handles",
+ "step": "Step",
+ "repre_info": "Availability"
+ }
+
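+    # Custom sort roles return string values with a numeric prefix so that
+    # group and merged items always stay above plain subset items regardless
+    # of the sort direction.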
+ SortAscendingRole = QtCore.Qt.UserRole + 2
+ SortDescendingRole = QtCore.Qt.UserRole + 3
+ merged_subset_colors = [
+ (55, 161, 222), # Light Blue
+ (231, 176, 0), # Yellow
+ (154, 13, 255), # Purple
+ (130, 184, 30), # Light Green
+ (211, 79, 63), # Light Red
+ (179, 181, 182), # Grey
+ (194, 57, 179), # Pink
+ (0, 120, 215), # Dark Blue
+ (0, 204, 106), # Dark Green
+ (247, 99, 12), # Orange
+ ]
+ not_last_hero_brush = QtGui.QBrush(QtGui.QColor(254, 121, 121))
+
+ # Should be minimum of required asset document keys
+ asset_doc_projection = {
+ "name": 1,
+ "label": 1
+ }
+ # Should be minimum of required subset document keys
+ subset_doc_projection = {
+ "name": 1,
+ "parent": 1,
+ "schema": 1,
+ "data.families": 1,
+ "data.subsetGroup": 1
+ }
+
+ def __init__(
+ self,
+ dbcon,
+ groups_config,
+ family_config_cache,
+ grouping=True,
+ parent=None,
+ asset_doc_projection=None,
+ subset_doc_projection=None
+ ):
+ super(SubsetsModel, self).__init__(parent=parent)
+
+ self.dbcon = dbcon
+
+ # Projections for Mongo queries
+ # - let ability to modify them if used in tools that require more than
+ # defaults
+        if asset_doc_projection:
+            self.asset_doc_projection = asset_doc_projection
+
+        if subset_doc_projection:
+            self.subset_doc_projection = subset_doc_projection
+
+ self.repre_icons = {}
+ self.sync_server = None
+ self.active_site = self.active_provider = None
+
+ self.columns_index = dict(
+ (key, idx) for idx, key in enumerate(self.Columns)
+ )
+ self._asset_ids = None
+
+ self.groups_config = groups_config
+ self.family_config_cache = family_config_cache
+ self._sorter = None
+ self._grouping = grouping
+ self._icons = {
+ "subset": qtawesome.icon("fa.file-o", color=style.colors.default)
+ }
+
+ self._doc_fetching_thread = None
+ self._doc_fetching_stop = False
+ self._doc_payload = {}
+
+ self.doc_fetched.connect(self.on_doc_fetched)
+
+ self.refresh()
+
+ def set_assets(self, asset_ids):
+ self._asset_ids = asset_ids
+ self.refresh()
+
+ def set_grouping(self, state):
+ self._grouping = state
+ self.on_doc_fetched()
+
+ def get_subsets_families(self):
+ return self._doc_payload.get("subset_families") or set()
+
+ def setData(self, index, value, role=QtCore.Qt.EditRole):
+ # Trigger additional edit when `version` column changed
+ # because it also updates the information in other columns
+ if index.column() == self.columns_index["version"]:
+ item = index.internalPointer()
+ parent = item["_id"]
+ if isinstance(value, HeroVersionType):
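+                # Hero version: find the hero document together with the
+                # source version it points to and reuse that version's
+                # "data" and "name".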
+ versions = list(self.dbcon.find({
+ "type": {"$in": ["version", "hero_version"]},
+ "parent": parent
+ }, sort=[("name", -1)]))
+
+ version = None
+ last_version = None
+ for __version in versions:
+ if __version["type"] == "hero_version":
+ version = __version
+ elif last_version is None:
+ last_version = __version
+
+ if version is not None and last_version is not None:
+ break
+
+ _version = None
+ for __version in versions:
+ if __version["_id"] == version["version_id"]:
+ _version = __version
+ break
+
+ version["data"] = _version["data"]
+ version["name"] = _version["name"]
+ version["is_from_latest"] = (
+ last_version["_id"] == _version["_id"]
+ )
+
+ else:
+ version = self.dbcon.find_one({
+ "name": value,
+ "type": "version",
+ "parent": parent
+ })
+
+ # update availability on active site when version changes
+ if self.sync_server.enabled and version:
+ site = self.active_site
+ query = self._repre_per_version_pipeline([version["_id"]],
+ site)
+ docs = list(self.dbcon.aggregate(query))
+ if docs:
+ repre = docs.pop()
+ version["data"].update(self._get_repre_dict(repre))
+
+ self.set_version(index, version)
+
+ return super(SubsetsModel, self).setData(index, value, role)
+
+ def set_version(self, index, version):
+ """Update the version data of the given index.
+
+ Arguments:
+ index (QtCore.QModelIndex): The model index.
+ version (dict) Version document in the database.
+
+ """
+
+ assert isinstance(index, QtCore.QModelIndex)
+ if not index.isValid():
+ return
+
+ item = index.internalPointer()
+
+ assert version["parent"] == item["_id"], (
+ "Version does not belong to subset"
+ )
+
+ # Get the data from the version
+ version_data = version.get("data", dict())
+
+ # Compute frame ranges (if data is present)
+ frame_start = version_data.get(
+ "frameStart",
+ # backwards compatibility
+ version_data.get("startFrame", None)
+ )
+ frame_end = version_data.get(
+ "frameEnd",
+ # backwards compatibility
+ version_data.get("endFrame", None)
+ )
+
+ handle_start = version_data.get("handleStart", None)
+ handle_end = version_data.get("handleEnd", None)
+ if handle_start is not None and handle_end is not None:
+ handles = "{}-{}".format(str(handle_start), str(handle_end))
+ else:
+ handles = version_data.get("handles", None)
+
+ if frame_start is not None and frame_end is not None:
+ # Remove superfluous zeros from numbers (3.0 -> 3) to improve
+ # readability for most frame ranges
+ start_clean = ("%f" % frame_start).rstrip("0").rstrip(".")
+ end_clean = ("%f" % frame_end).rstrip("0").rstrip(".")
+ frames = "{0}-{1}".format(start_clean, end_clean)
+ duration = frame_end - frame_start + 1
+ else:
+ frames = None
+ duration = None
+
+ schema_maj_version, _ = schema.get_schema_version(item["schema"])
+ if schema_maj_version < 3:
+ families = version_data.get("families", [None])
+ else:
+ families = item["data"]["families"]
+
+ family = None
+ if families:
+ family = families[0]
+
+ family_config = self.family_config_cache.family_config(family)
+
+ item.update({
+ "version": version["name"],
+ "version_document": version,
+ "author": version_data.get("author", None),
+ "time": version_data.get("time", None),
+ "family": family,
+ "familyLabel": family_config.get("label", family),
+ "familyIcon": family_config.get("icon", None),
+ "families": set(families),
+ "frameStart": frame_start,
+ "frameEnd": frame_end,
+ "duration": duration,
+ "handles": handles,
+ "frames": frames,
+ "step": version_data.get("step", None),
+ })
+
+ repre_info = version_data.get("repre_info")
+ if repre_info:
+ item["repre_info"] = repre_info
+ item["repre_icon"] = version_data.get("repre_icon")
+
+ def _fetch(self):
+ asset_docs = self.dbcon.find(
+ {
+ "type": "asset",
+ "_id": {"$in": self._asset_ids}
+ },
+ self.asset_doc_projection
+ )
+ asset_docs_by_id = {
+ asset_doc["_id"]: asset_doc
+ for asset_doc in asset_docs
+ }
+
+ subset_docs_by_id = {}
+ subset_docs = self.dbcon.find(
+ {
+ "type": "subset",
+ "parent": {"$in": self._asset_ids}
+ },
+ self.subset_doc_projection
+ )
+ subset_families = set()
+ for subset_doc in subset_docs:
+ if self._doc_fetching_stop:
+ return
+
+ families = subset_doc.get("data", {}).get("families")
+ if families:
+ subset_families.add(families[0])
+
+ subset_docs_by_id[subset_doc["_id"]] = subset_doc
+
+ subset_ids = list(subset_docs_by_id.keys())
+ _pipeline = [
+ # Find all versions of those subsets
+ {"$match": {
+ "type": "version",
+ "parent": {"$in": subset_ids}
+ }},
+ # Sorting versions all together
+ {"$sort": {"name": 1}},
+ # Group them by "parent", but only take the last
+ {"$group": {
+ "_id": "$parent",
+ "_version_id": {"$last": "$_id"},
+ "name": {"$last": "$name"},
+ "type": {"$last": "$type"},
+ "data": {"$last": "$data"},
+ "locations": {"$last": "$locations"},
+ "schema": {"$last": "$schema"}
+ }}
+ ]
+ last_versions_by_subset_id = dict()
+ for doc in self.dbcon.aggregate(_pipeline):
+ if self._doc_fetching_stop:
+ return
+ doc["parent"] = doc["_id"]
+ doc["_id"] = doc.pop("_version_id")
+ last_versions_by_subset_id[doc["parent"]] = doc
+
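+        # Hero versions point to a concrete version via "version_id";
+        # collect those that are not among the last versions so their
+        # source documents can be queried separately.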
+ hero_versions = self.dbcon.find({
+ "type": "hero_version",
+ "parent": {"$in": subset_ids}
+ })
+ missing_versions = []
+ for hero_version in hero_versions:
+ version_id = hero_version["version_id"]
+ if version_id not in last_versions_by_subset_id:
+ missing_versions.append(version_id)
+
+ missing_versions_by_id = {}
+ if missing_versions:
+ missing_version_docs = self.dbcon.find({
+ "type": "version",
+ "_id": {"$in": missing_versions}
+ })
+ missing_versions_by_id = {
+ missing_version_doc["_id"]: missing_version_doc
+ for missing_version_doc in missing_version_docs
+ }
+
+ for hero_version in hero_versions:
+ version_id = hero_version["version_id"]
+ subset_id = hero_version["parent"]
+
+ version_doc = last_versions_by_subset_id.get(subset_id)
+ if version_doc is None:
+ version_doc = missing_versions_by_id.get(version_id)
+ if version_doc is None:
+ continue
+
+ hero_version["data"] = version_doc["data"]
+ hero_version["name"] = HeroVersionType(version_doc["name"])
+ # Add information if hero version is from latest version
+ hero_version["is_from_latest"] = version_id == version_doc["_id"]
+
+ last_versions_by_subset_id[subset_id] = hero_version
+
+ self._doc_payload = {
+ "asset_docs_by_id": asset_docs_by_id,
+ "subset_docs_by_id": subset_docs_by_id,
+ "subset_families": subset_families,
+ "last_versions_by_subset_id": last_versions_by_subset_id
+ }
+
+ if self.sync_server.enabled:
+ version_ids = set()
+ for _subset_id, doc in last_versions_by_subset_id.items():
+ version_ids.add(doc["_id"])
+
+ site = self.active_site
+ query = self._repre_per_version_pipeline(list(version_ids), site)
+
+ repre_info = {}
+ for doc in self.dbcon.aggregate(query):
+ if self._doc_fetching_stop:
+ return
+ doc["provider"] = self.active_provider
+ repre_info[doc["_id"]] = doc
+
+ self._doc_payload["repre_info_by_version_id"] = repre_info
+
+ self.doc_fetched.emit()
+
+ def fetch_subset_and_version(self):
+ """Query all subsets and latest versions from aggregation
+ (NOTE) The returned version documents are NOT the real version
+ document, it's generated from the MongoDB's aggregation so
+ some of the first level field may not be presented.
+ """
+ self._doc_payload = {}
+ self._doc_fetching_stop = False
+ self._doc_fetching_thread = lib.create_qthread(self._fetch)
+ self._doc_fetching_thread.start()
+
+ def stop_fetch_thread(self):
+ if self._doc_fetching_thread is not None:
+ self._doc_fetching_stop = True
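+            # Busy-wait until the fetching thread notices the stop flag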
+ while self._doc_fetching_thread.isRunning():
+ pass
+
+ def refresh(self):
+ self.stop_fetch_thread()
+ self.clear()
+
+ self.reset_sync_server()
+
+ if not self._asset_ids:
+ self.doc_fetched.emit()
+ return
+
+ self.fetch_subset_and_version()
+
+ def on_doc_fetched(self):
+ self.clear()
+ self.beginResetModel()
+
+ asset_docs_by_id = self._doc_payload.get(
+ "asset_docs_by_id"
+ )
+ subset_docs_by_id = self._doc_payload.get(
+ "subset_docs_by_id"
+ )
+ last_versions_by_subset_id = self._doc_payload.get(
+ "last_versions_by_subset_id"
+ )
+
+ repre_info_by_version_id = self._doc_payload.get(
+ "repre_info_by_version_id"
+ )
+
+ if (
+ asset_docs_by_id is None
+ or subset_docs_by_id is None
+ or last_versions_by_subset_id is None
+ or len(self._asset_ids) == 0
+ ):
+ self.endResetModel()
+ self.refreshed.emit(False)
+ return
+
+ self._fill_subset_items(
+ asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id,
+ repre_info_by_version_id
+ )
+
+ def create_multiasset_group(
+ self, subset_name, asset_ids, subset_counter, parent_item=None
+ ):
+ subset_color = self.merged_subset_colors[
+ subset_counter % len(self.merged_subset_colors)
+ ]
+ merge_group = Item()
+ merge_group.update({
+ "subset": "{} ({})".format(subset_name, len(asset_ids)),
+ "isMerged": True,
+ "childRow": 0,
+ "subsetColor": subset_color,
+ "assetIds": list(asset_ids),
+ "icon": qtawesome.icon(
+ "fa.circle",
+ color="#{0:02x}{1:02x}{2:02x}".format(*subset_color)
+ )
+ })
+
+ subset_counter += 1
+ self.add_child(merge_group, parent_item)
+
+ return merge_group
+
+ def _fill_subset_items(
+ self, asset_docs_by_id, subset_docs_by_id, last_versions_by_subset_id,
+ repre_info_by_version_id
+ ):
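+        # Split subsets into configured groups; a subset name that exists
+        # under multiple selected assets is nested under a colored
+        # "merged" item.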
+ _groups_tuple = self.groups_config.split_subsets_for_groups(
+ subset_docs_by_id.values(), self._grouping
+ )
+ groups, subset_docs_without_group, subset_docs_by_group = _groups_tuple
+
+ group_item_by_name = {}
+ for group_data in groups:
+ group_name = group_data["name"]
+ group_item = Item()
+ group_item.update({
+ "subset": group_name,
+ "isGroup": True,
+ "childRow": 0
+ })
+ group_item.update(group_data)
+
+ self.add_child(group_item)
+
+ group_item_by_name[group_name] = {
+ "item": group_item,
+ "index": self.index(group_item.row(), 0)
+ }
+
+ subset_counter = 0
+ for group_name, subset_docs_by_name in subset_docs_by_group.items():
+ parent_item = group_item_by_name[group_name]["item"]
+ parent_index = group_item_by_name[group_name]["index"]
+ for subset_name in sorted(subset_docs_by_name.keys()):
+ subset_docs = subset_docs_by_name[subset_name]
+ asset_ids = [
+ subset_doc["parent"] for subset_doc in subset_docs
+ ]
+ if len(subset_docs) > 1:
+ _parent_item = self.create_multiasset_group(
+ subset_name, asset_ids, subset_counter, parent_item
+ )
+ _parent_index = self.index(
+ _parent_item.row(), 0, parent_index
+ )
+ subset_counter += 1
+ else:
+ _parent_item = parent_item
+ _parent_index = parent_index
+
+ for subset_doc in subset_docs:
+ asset_id = subset_doc["parent"]
+
+ data = copy.deepcopy(subset_doc)
+ data["subset"] = subset_name
+ data["asset"] = asset_docs_by_id[asset_id]["name"]
+
+ last_version = last_versions_by_subset_id.get(
+ subset_doc["_id"]
+ )
+ data["last_version"] = last_version
+
+ # do not show subset without version
+ if not last_version:
+ continue
+
+ data.update(
+ self._get_last_repre_info(repre_info_by_version_id,
+ last_version["_id"]))
+
+ item = Item()
+ item.update(data)
+ self.add_child(item, _parent_item)
+
+ index = self.index(item.row(), 0, _parent_index)
+ self.set_version(index, last_version)
+
+ for subset_name in sorted(subset_docs_without_group.keys()):
+ subset_docs = subset_docs_without_group[subset_name]
+ asset_ids = [subset_doc["parent"] for subset_doc in subset_docs]
+ parent_item = None
+ parent_index = None
+ if len(subset_docs) > 1:
+ parent_item = self.create_multiasset_group(
+ subset_name, asset_ids, subset_counter
+ )
+ parent_index = self.index(parent_item.row(), 0)
+ subset_counter += 1
+
+ for subset_doc in subset_docs:
+ asset_id = subset_doc["parent"]
+
+ data = copy.deepcopy(subset_doc)
+ data["subset"] = subset_name
+ data["asset"] = asset_docs_by_id[asset_id]["name"]
+
+ last_version = last_versions_by_subset_id.get(
+ subset_doc["_id"]
+ )
+ data["last_version"] = last_version
+
+ # do not show subset without version
+ if not last_version:
+ continue
+
+ data.update(
+ self._get_last_repre_info(repre_info_by_version_id,
+ last_version["_id"]))
+
+ item = Item()
+ item.update(data)
+ self.add_child(item, parent_item)
+
+ index = self.index(item.row(), 0, parent_index)
+ self.set_version(index, last_version)
+
+ self.endResetModel()
+ self.refreshed.emit(True)
+
+ def data(self, index, role):
+ if not index.isValid():
+ return
+
+ if role == self.SortDescendingRole:
+ item = index.internalPointer()
+ if item.get("isGroup"):
+                # Ensure groups stay on top when sorting in descending order
+ prefix = "2"
+ order = item["order"]
+ else:
+ if item.get("isMerged"):
+ prefix = "1"
+ else:
+ prefix = "0"
+ order = str(super(SubsetsModel, self).data(
+ index, QtCore.Qt.DisplayRole
+ ))
+ return prefix + order
+
+ if role == self.SortAscendingRole:
+ item = index.internalPointer()
+ if item.get("isGroup"):
+                # Ensure groups stay on top when sorting in ascending order
+ prefix = "0"
+ order = item["order"]
+ else:
+ if item.get("isMerged"):
+ prefix = "1"
+ else:
+ prefix = "2"
+ order = str(super(SubsetsModel, self).data(
+ index, QtCore.Qt.DisplayRole
+ ))
+ return prefix + order
+
+ if role == QtCore.Qt.DisplayRole:
+ if index.column() == self.columns_index["family"]:
+ # Show familyLabel instead of family
+ item = index.internalPointer()
+ return item.get("familyLabel", None)
+
+ elif role == QtCore.Qt.DecorationRole:
+
+ # Add icon to subset column
+ if index.column() == self.columns_index["subset"]:
+ item = index.internalPointer()
+ if item.get("isGroup") or item.get("isMerged"):
+ return item["icon"]
+ else:
+ return self._icons["subset"]
+
+ # Add icon to family column
+ if index.column() == self.columns_index["family"]:
+ item = index.internalPointer()
+ return item.get("familyIcon", None)
+
+ if index.column() == self.columns_index.get("repre_info"):
+ item = index.internalPointer()
+ return item.get("repre_icon", None)
+
+ elif role == QtCore.Qt.ForegroundRole:
+ item = index.internalPointer()
+ version_doc = item.get("version_document")
+ if version_doc and version_doc.get("type") == "hero_version":
+ if not version_doc["is_from_latest"]:
+ return self.not_last_hero_brush
+
+ return super(SubsetsModel, self).data(index, role)
+
+ def flags(self, index):
+ flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
+
+ # Make the version column editable
+ if index.column() == self.columns_index["version"]:
+ flags |= QtCore.Qt.ItemIsEditable
+
+ return flags
+
+ def headerData(self, section, orientation, role):
+ """Remap column names to labels"""
+ if role == QtCore.Qt.DisplayRole:
+ if section < len(self.Columns):
+ key = self.Columns[section]
+ return self.column_labels_mapping.get(key) or key
+
+        return super(TreeModel, self).headerData(section, orientation, role)
+
+ def _get_last_repre_info(self, repre_info_by_version_id, last_version_id):
+ data = {}
+ if repre_info_by_version_id:
+ repre_info = repre_info_by_version_id.get(last_version_id)
+ return self._get_repre_dict(repre_info)
+
+ return data
+
+ def _get_repre_dict(self, repre_info):
+ """Returns icon and str representation of availability"""
+ data = {}
+ if repre_info:
+ repres_str = "{}/{}".format(
+ int(math.floor(float(repre_info['avail_repre']))),
+ int(math.floor(float(repre_info['repre_count']))))
+
+ data["repre_info"] = repres_str
+ data["repre_icon"] = self.repre_icons.get(self.active_provider)
+
+ return data
+
+ def _repre_per_version_pipeline(self, version_ids, site):
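+        # Aggregation: unwind representation files, compute per-file
+        # availability on 'site', group to an availability ratio per
+        # representation and finally, per version, count the representations
+        # and sum their availability ratios.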
+ query = [
+ {"$match": {"parent": {"$in": version_ids},
+ "type": "representation",
+ "files.sites.name": {"$exists": 1}}},
+ {"$unwind": "$files"},
+ {'$addFields': {
+ 'order_local': {
+ '$filter': {'input': '$files.sites', 'as': 'p',
+ 'cond': {'$eq': ['$$p.name', site]}
+ }}
+ }},
+ {'$addFields': {
+ 'progress_local': {"$arrayElemAt": [{
+ '$cond': [{'$size': "$order_local.progress"},
+ "$order_local.progress",
+ # if exists created_dt count is as available
+ {'$cond': [
+ {'$size': "$order_local.created_dt"},
+ [1],
+ [0]
+ ]}
+ ]}, 0]}
+ }},
+ {'$group': { # first group by repre
+ '_id': '$_id',
+ 'parent': {'$first': '$parent'},
+ 'files_count': {'$sum': 1},
+ 'files_avail': {'$sum': "$progress_local"},
+ 'avail_ratio': {'$first': {
+ '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}]}}
+ }},
+ {'$group': { # second group by parent, eg version_id
+ '_id': '$parent',
+ 'repre_count': {'$sum': 1}, # total representations
+ # fully available representation for site
+ 'avail_repre': {'$sum': "$avail_ratio"}
+ }},
+ ]
+ return query
+
+
+class GroupMemberFilterProxyModel(QtCore.QSortFilterProxyModel):
+ """Provide the feature of filtering group by the acceptance of members
+
+    The subset group nodes will not be filtered directly; a group node's
+    acceptance depends on its child subsets' acceptance.
+
+ """
+
+ if is_filtering_recursible():
+ def _is_group_acceptable(self, index, node):
+ # (NOTE) With the help of `RecursiveFiltering` feature from
+ # Qt 5.10, group always not be accepted by default.
+ return False
+ filter_accepts_group = _is_group_acceptable
+
+ else:
+        # Patch the method that is missing in older Qt versions
+ setRecursiveFilteringEnabled = (lambda *args: None)
+
+ def _is_group_acceptable(self, index, model):
+ # (NOTE) This is not recursive.
+ for child_row in range(model.rowCount(index)):
+ if self.filterAcceptsRow(child_row, index):
+ return True
+ return False
+ filter_accepts_group = _is_group_acceptable
+
+ def __init__(self, *args, **kwargs):
+ super(GroupMemberFilterProxyModel, self).__init__(*args, **kwargs)
+ self.setRecursiveFilteringEnabled(True)
+
+
+class SubsetFilterProxyModel(GroupMemberFilterProxyModel):
+ def filterAcceptsRow(self, row, parent):
+ model = self.sourceModel()
+ index = model.index(row, self.filterKeyColumn(), parent)
+ item = index.internalPointer()
+ if item.get("isGroup"):
+ return self.filter_accepts_group(index, model)
+ return super(
+ SubsetFilterProxyModel, self
+ ).filterAcceptsRow(row, parent)
+
+
+class FamiliesFilterProxyModel(GroupMemberFilterProxyModel):
+ """Filters to specified families"""
+
+ def __init__(self, *args, **kwargs):
+ super(FamiliesFilterProxyModel, self).__init__(*args, **kwargs)
+ self._families = set()
+
+ def familyFilter(self):
+ return self._families
+
+ def setFamiliesFilter(self, values):
+ """Set the families to include"""
+ assert isinstance(values, (tuple, list, set))
+ self._families = set(values)
+ self.invalidateFilter()
+
+ def filterAcceptsRow(self, row=0, parent=None):
+ if not self._families:
+ return False
+
+ model = self.sourceModel()
+ index = model.index(row, 0, parent=parent or QtCore.QModelIndex())
+
+ # Ensure index is valid
+        if index is None or not index.isValid():
+ return True
+
+ # Get the item data and validate
+ item = model.data(index, TreeModel.ItemRole)
+
+ if item.get("isGroup"):
+ return self.filter_accepts_group(index, model)
+
+ family = item.get("family")
+ if not family:
+ return True
+
+        # Keep only the families which are in the filter list
+ return family in self._families
+
+ def sort(self, column, order):
+ proxy = self.sourceModel()
+ model = proxy.sourceModel()
+ # We need to know the sorting direction for pinning groups on top
+ if order == QtCore.Qt.AscendingOrder:
+ self.setSortRole(model.SortAscendingRole)
+ else:
+ self.setSortRole(model.SortDescendingRole)
+
+ super(FamiliesFilterProxyModel, self).sort(column, order)
+
+
+class RepresentationSortProxyModel(GroupMemberFilterProxyModel):
+ """To properly sort progress string"""
+ def lessThan(self, left, right):
+ source_model = self.sourceModel()
+ progress_indexes = [source_model.Columns.index("active_site"),
+ source_model.Columns.index("remote_site")]
+ if left.column() in progress_indexes:
+ left_data = self.sourceModel().data(left, QtCore.Qt.DisplayRole)
+ right_data = self.sourceModel().data(right, QtCore.Qt.DisplayRole)
+ left_val = re.sub("[^0-9]", '', left_data)
+ right_val = re.sub("[^0-9]", '', right_data)
+
+ return int(left_val) < int(right_val)
+
+ return super(RepresentationSortProxyModel, self).lessThan(left, right)
+
+
+class RepresentationModel(TreeModel, BaseRepresentationModel):
+
+ doc_fetched = QtCore.Signal()
+ refreshed = QtCore.Signal(bool)
+
+ SiteNameRole = QtCore.Qt.UserRole + 2
+ ProgressRole = QtCore.Qt.UserRole + 3
+ SiteSideRole = QtCore.Qt.UserRole + 4
+ IdRole = QtCore.Qt.UserRole + 5
+ ContextRole = QtCore.Qt.UserRole + 6
+
+ Columns = [
+ "name",
+ "subset",
+ "asset",
+ "active_site",
+ "remote_site"
+ ]
+
+ column_labels_mapping = {
+ "name": "Name",
+ "subset": "Subset",
+ "asset": "Asset",
+ "active_site": "Active",
+ "remote_site": "Remote"
+ }
+
+ def __init__(self, dbcon, header, version_ids):
+ super(RepresentationModel, self).__init__()
+ self.dbcon = dbcon
+ self._data = []
+ self._header = header
+ self.version_ids = version_ids
+
+ manager = ModulesManager()
+ sync_server = active_site = remote_site = None
+ active_provider = remote_provider = None
+
+ project = dbcon.Session["AVALON_PROJECT"]
+ if project:
+ sync_server = manager.modules_by_name["sync_server"]
+ active_site = sync_server.get_active_site(project)
+ remote_site = sync_server.get_remote_site(project)
+
+ # TODO refactor
+            active_provider = sync_server.get_provider_for_site(
+                project, active_site
+            )
+ if active_site == 'studio':
+ active_provider = 'studio'
+
+            remote_provider = sync_server.get_provider_for_site(
+                project, remote_site
+            )
+
+ if remote_site == 'studio':
+ remote_provider = 'studio'
+
+ self.sync_server = sync_server
+ self.active_site = active_site
+ self.active_provider = active_provider
+ self.remote_site = remote_site
+ self.remote_provider = remote_provider
+
+ self.doc_fetched.connect(self.on_doc_fetched)
+
+ self._docs = {}
+ self._icons = lib.get_repre_icons()
+ self._icons["repre"] = qtawesome.icon("fa.file-o",
+ color=style.colors.default)
+
+ def set_version_ids(self, version_ids):
+ self.version_ids = version_ids
+ self.refresh()
+
+ def data(self, index, role):
+ item = index.internalPointer()
+
+ if role == self.IdRole:
+ return item.get("_id")
+
+ if role == QtCore.Qt.DecorationRole:
+ # Add icon to subset column
+ if index.column() == self.Columns.index("name"):
+ if item.get("isMerged"):
+ return item["icon"]
+ else:
+ return self._icons["repre"]
+
+ active_index = self.Columns.index("active_site")
+ remote_index = self.Columns.index("remote_site")
+ if role == QtCore.Qt.DisplayRole:
+ progress = None
+ label = ''
+ if index.column() == active_index:
+ progress = item.get("active_site_progress", 0)
+ elif index.column() == remote_index:
+ progress = item.get("remote_site_progress", 0)
+
+ if progress is not None:
+ # site added, sync in progress
+ progress_str = "not avail."
+ if progress >= 0:
+ # progress == 0 for isMerged is unavailable
+ if progress == 0 and item.get("isMerged"):
+ progress_str = "not avail."
+ else:
+ progress_str = "{}% {}".format(int(progress * 100),
+ label)
+
+ return progress_str
+
+ if role == QtCore.Qt.DecorationRole:
+ if index.column() == active_index:
+ return item.get("active_site_icon", None)
+ if index.column() == remote_index:
+ return item.get("remote_site_icon", None)
+
+ if role == self.SiteNameRole:
+ if index.column() == active_index:
+ return item.get("active_site_name", None)
+ if index.column() == remote_index:
+ return item.get("remote_site_name", None)
+
+ if role == self.SiteSideRole:
+ if index.column() == active_index:
+ return "active"
+ if index.column() == remote_index:
+ return "remote"
+
+ if role == self.ProgressRole:
+ if index.column() == active_index:
+ return item.get("active_site_progress", 0)
+ if index.column() == remote_index:
+ return item.get("remote_site_progress", 0)
+
+ return super(RepresentationModel, self).data(index, role)
+
+ def on_doc_fetched(self):
+ self.clear()
+ self.beginResetModel()
+ subsets = set()
+ assets = set()
+ repre_groups = {}
+ repre_groups_items = {}
+ group = None
+ self._items_by_id = {}
+ for doc in self._docs:
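+            # With multiple versions selected, representations that share a
+            # name are nested under a merged group item created on demand.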
+ if len(self.version_ids) > 1:
+ group = repre_groups.get(doc["name"])
+ if not group:
+ group_item = Item()
+ group_item.update({
+ "_id": doc["_id"],
+ "name": doc["name"],
+ "isMerged": True,
+ "childRow": 0,
+ "active_site_name": self.active_site,
+ "remote_site_name": self.remote_site,
+ "icon": qtawesome.icon(
+ "fa.folder",
+ color=style.colors.default
+ )
+ })
+ self.add_child(group_item, None)
+ repre_groups[doc["name"]] = group_item
+ repre_groups_items[doc["name"]] = 0
+ group = group_item
+
+ progress = lib.get_progress_for_repre(doc,
+ self.active_site,
+ self.remote_site)
+
+ active_site_icon = self._icons.get(self.active_provider)
+ remote_site_icon = self._icons.get(self.remote_provider)
+
+ data = {
+ "_id": doc["_id"],
+ "name": doc["name"],
+ "subset": doc["context"]["subset"],
+ "asset": doc["context"]["asset"],
+ "isMerged": False,
+
+ "active_site_icon": active_site_icon,
+ "remote_site_icon": remote_site_icon,
+ "active_site_name": self.active_site,
+ "remote_site_name": self.remote_site,
+ "active_site_progress": progress[self.active_site],
+ "remote_site_progress": progress[self.remote_site]
+ }
+ subsets.add(doc["context"]["subset"])
+ assets.add(doc["context"]["subset"])
+
+ item = Item()
+ item.update(data)
+
+ current_progress = {
+ 'active_site_progress': progress[self.active_site],
+ 'remote_site_progress': progress[self.remote_site]
+ }
+ if group:
+ group = self._sum_group_progress(doc["name"], group,
+ current_progress,
+ repre_groups_items)
+
+ self.add_child(item, group)
+
+ # finalize group average progress
+ for group_name, group in repre_groups.items():
+ items_cnt = repre_groups_items[group_name]
+ active_progress = group.get("active_site_progress", 0)
+ group["active_site_progress"] = active_progress / items_cnt
+ remote_progress = group.get("remote_site_progress", 0)
+ group["remote_site_progress"] = remote_progress / items_cnt
+
+ self.endResetModel()
+ self.refreshed.emit(False)
+
+ def refresh(self):
+ docs = []
+ session_project = self.dbcon.Session['AVALON_PROJECT']
+ if not session_project:
+ return
+
+ if self.version_ids:
+ # Simple find here for now, expected to receive lower number of
+ # representations and logic could be in Python
+ docs = list(self.dbcon.find(
+ {"type": "representation", "parent": {"$in": self.version_ids},
+ "files.sites.name": {"$exists": 1}}, self.projection()))
+ self._docs = docs
+
+ self.doc_fetched.emit()
+
+ @classmethod
+ def projection(cls):
+ return {
+ "_id": 1,
+ "name": 1,
+ "context.subset": 1,
+ "context.asset": 1,
+ "context.version": 1,
+ "context.representation": 1,
+ 'files.sites': 1
+ }
+
+ def _sum_group_progress(self, repre_name, group, current_item_progress,
+ repre_groups_items):
+ """
+ Update final group progress
+ Called after every item in group is added
+
+ Args:
+ repre_name(string)
+ group(dict): info about group of selected items
+ current_item_progress(dict): {'active_site_progress': XX,
+ 'remote_site_progress': YY}
+ repre_groups_items(dict)
+ Returns:
+ (dict): updated group info
+ """
+ repre_groups_items[repre_name] += 1
+
+ for key, progress in current_item_progress.items():
+ group[key] = (group.get(key, 0) + max(progress, 0))
+
+ return group
diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py
new file mode 100644
index 0000000000..6b94fc6e44
--- /dev/null
+++ b/openpype/tools/loader/widgets.py
@@ -0,0 +1,1591 @@
+import os
+import sys
+import inspect
+import datetime
+import pprint
+import traceback
+import collections
+
+from Qt import QtWidgets, QtCore, QtGui
+
+from avalon import api, pipeline
+from avalon.lib import HeroVersionType
+
+from openpype.tools.utils import lib as tools_lib
+from openpype.tools.utils.delegates import (
+ VersionDelegate,
+ PrettyTimeDelegate
+)
+from openpype.tools.utils.widgets import OptionalMenu
+from openpype.tools.utils.views import (
+ TreeViewSpinner,
+ DeselectableTreeView
+)
+
+from .model import (
+ SubsetsModel,
+ SubsetFilterProxyModel,
+ FamiliesFilterProxyModel,
+ RepresentationModel,
+ RepresentationSortProxyModel
+)
+from . import lib
+
+
+class OverlayFrame(QtWidgets.QFrame):
+ def __init__(self, label, parent):
+ super(OverlayFrame, self).__init__(parent)
+
+ label_widget = QtWidgets.QLabel(label, self)
+ main_layout = QtWidgets.QVBoxLayout(self)
+ main_layout.addWidget(label_widget, 1, QtCore.Qt.AlignCenter)
+
+ self.label_widget = label_widget
+
+ label_widget.setStyleSheet("background: transparent;")
+ self.setStyleSheet((
+ "background: rgba(0, 0, 0, 127);"
+ "font-size: 60pt;"
+ ))
+
+ def set_label(self, label):
+ self.label_widget.setText(label)
+
+
+class LoadErrorMessageBox(QtWidgets.QDialog):
+ def __init__(self, messages, parent=None):
+ super(LoadErrorMessageBox, self).__init__(parent)
+ self.setWindowTitle("Loading failed")
+ self.setFocusPolicy(QtCore.Qt.StrongFocus)
+
+ body_layout = QtWidgets.QVBoxLayout(self)
+
+ main_label = (
+ "Failed to load items"
+ )
+ main_label_widget = QtWidgets.QLabel(main_label, self)
+ body_layout.addWidget(main_label_widget)
+
+ item_name_template = (
+ "Subset: {} "
+ "Version: {} "
+ "Representation: {} "
+ )
+ exc_msg_template = "{}"
+
+ for exc_msg, tb, repre, subset, version in messages:
+ line = self._create_line()
+ body_layout.addWidget(line)
+
+ item_name = item_name_template.format(subset, version, repre)
+ item_name_widget = QtWidgets.QLabel(
+ item_name.replace("\n", " "), self
+ )
+ body_layout.addWidget(item_name_widget)
+
+ exc_msg = exc_msg_template.format(exc_msg.replace("\n", " "))
+ message_label_widget = QtWidgets.QLabel(exc_msg, self)
+ body_layout.addWidget(message_label_widget)
+
+ if tb:
+ tb_widget = QtWidgets.QLabel(tb.replace("\n", " "), self)
+ tb_widget.setTextInteractionFlags(
+ QtCore.Qt.TextBrowserInteraction
+ )
+ body_layout.addWidget(tb_widget)
+
+ footer_widget = QtWidgets.QWidget(self)
+ footer_layout = QtWidgets.QHBoxLayout(footer_widget)
+ buttonBox = QtWidgets.QDialogButtonBox(QtCore.Qt.Vertical)
+ buttonBox.setStandardButtons(
+ QtWidgets.QDialogButtonBox.StandardButton.Ok
+ )
+ buttonBox.accepted.connect(self._on_accept)
+ footer_layout.addWidget(buttonBox, alignment=QtCore.Qt.AlignRight)
+ body_layout.addWidget(footer_widget)
+
+ def _on_accept(self):
+ self.close()
+
+ def _create_line(self):
+ line = QtWidgets.QFrame(self)
+ line.setFixedHeight(2)
+ line.setFrameShape(QtWidgets.QFrame.HLine)
+ line.setFrameShadow(QtWidgets.QFrame.Sunken)
+ return line
+
+
+class SubsetWidget(QtWidgets.QWidget):
+ """A widget that lists the published subsets for an asset"""
+
+ active_changed = QtCore.Signal() # active index changed
+ version_changed = QtCore.Signal() # version state changed for a subset
+ load_started = QtCore.Signal()
+ load_ended = QtCore.Signal()
+ refreshed = QtCore.Signal(bool)
+
+ default_widths = (
+ ("subset", 200),
+ ("asset", 130),
+ ("family", 90),
+ ("version", 60),
+ ("time", 125),
+ ("author", 75),
+ ("frames", 75),
+ ("duration", 60),
+ ("handles", 55),
+ ("step", 10),
+ ("repre_info", 65)
+ )
+
+ def __init__(
+ self,
+ dbcon,
+ groups_config,
+ family_config_cache,
+ enable_grouping=True,
+ tool_name=None,
+ parent=None
+ ):
+ super(SubsetWidget, self).__init__(parent=parent)
+
+ self.dbcon = dbcon
+ self.tool_name = tool_name
+
+ model = SubsetsModel(
+ dbcon,
+ groups_config,
+ family_config_cache,
+ grouping=enable_grouping
+ )
+ proxy = SubsetFilterProxyModel()
+ family_proxy = FamiliesFilterProxyModel()
+ family_proxy.setSourceModel(proxy)
+
+ subset_filter = QtWidgets.QLineEdit()
+ subset_filter.setPlaceholderText("Filter subsets..")
+
+ groupable = QtWidgets.QCheckBox("Enable Grouping")
+ groupable.setChecked(enable_grouping)
+
+ top_bar_layout = QtWidgets.QHBoxLayout()
+ top_bar_layout.addWidget(subset_filter)
+ top_bar_layout.addWidget(groupable)
+
+ view = TreeViewSpinner()
+ view.setObjectName("SubsetView")
+ view.setIndentation(20)
+ view.setStyleSheet("""
+ QTreeView::item{
+ padding: 5px 1px;
+ border: 0px;
+ }
+ """)
+ view.setAllColumnsShowFocus(True)
+
+ # Set view delegates
+ version_delegate = VersionDelegate(self.dbcon)
+ column = model.Columns.index("version")
+ view.setItemDelegateForColumn(column, version_delegate)
+
+ time_delegate = PrettyTimeDelegate()
+ column = model.Columns.index("time")
+ view.setItemDelegateForColumn(column, time_delegate)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.addLayout(top_bar_layout)
+ layout.addWidget(view)
+
+ view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+ view.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
+ view.setSortingEnabled(True)
+ view.sortByColumn(1, QtCore.Qt.AscendingOrder)
+ view.setAlternatingRowColors(True)
+
+ self.data = {
+ "delegates": {
+ "version": version_delegate,
+ "time": time_delegate
+ },
+ "state": {
+ "groupable": groupable
+ }
+ }
+
+ self.proxy = proxy
+ self.model = model
+ self.view = view
+ self.filter = subset_filter
+ self.family_proxy = family_proxy
+
+ # settings and connections
+ self.proxy.setSourceModel(self.model)
+ self.proxy.setDynamicSortFilter(True)
+ self.proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
+
+ self.view.setModel(self.family_proxy)
+ self.view.customContextMenuRequested.connect(self.on_context_menu)
+
+ for column_name, width in self.default_widths:
+ idx = model.Columns.index(column_name)
+ view.setColumnWidth(idx, width)
+
+ actual_project = dbcon.Session["AVALON_PROJECT"]
+ self.on_project_change(actual_project)
+
+ selection = view.selectionModel()
+ selection.selectionChanged.connect(self.active_changed)
+
+ version_delegate.version_changed.connect(self.version_changed)
+
+ groupable.stateChanged.connect(self.set_grouping)
+
+ self.filter.textChanged.connect(self.proxy.setFilterRegExp)
+ self.filter.textChanged.connect(self.view.expandAll)
+ model.refreshed.connect(self.refreshed)
+
+ self.model.refresh()
+
+ def get_subsets_families(self):
+ return self.model.get_subsets_families()
+
+ def set_family_filters(self, families):
+ self.family_proxy.setFamiliesFilter(families)
+
+ def is_groupable(self):
+ return self.data["state"]["groupable"].checkState()
+
+ def set_grouping(self, state):
+ with tools_lib.preserve_selection(tree_view=self.view,
+ current_index=False):
+ self.model.set_grouping(state)
+
+ def set_loading_state(self, loading, empty):
+ view = self.view
+
+ if view.is_loading != loading:
+ if loading:
+ view.spinner.repaintNeeded.connect(view.viewport().update)
+ else:
+ view.spinner.repaintNeeded.disconnect()
+
+ view.is_loading = loading
+ view.is_empty = empty
+
+ def _repre_contexts_for_loaders_filter(self, items):
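+        """Prepare contexts of representations for loader filtering.
+
+        Returns:
+            tuple: (repre context by representation id,
+                representation docs by version id)
+        """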
+ version_docs_by_id = {
+ item["version_document"]["_id"]: item["version_document"]
+ for item in items
+ }
+ version_docs_by_subset_id = collections.defaultdict(list)
+ for item in items:
+ subset_id = item["version_document"]["parent"]
+ version_docs_by_subset_id[subset_id].append(
+ item["version_document"]
+ )
+
+ subset_docs = list(self.dbcon.find(
+ {
+ "_id": {"$in": list(version_docs_by_subset_id.keys())},
+ "type": "subset"
+ },
+ {
+ "schema": 1,
+ "data.families": 1
+ }
+ ))
+ subset_docs_by_id = {
+ subset_doc["_id"]: subset_doc
+ for subset_doc in subset_docs
+ }
+ version_ids = list(version_docs_by_id.keys())
+ repre_docs = self.dbcon.find(
+ # Query all representations for selected versions at once
+ {
+ "type": "representation",
+ "parent": {"$in": version_ids}
+ },
+ # Query only name and parent from representation
+ {
+ "name": 1,
+ "parent": 1
+ }
+ )
+ repre_docs_by_version_id = {
+ version_id: []
+ for version_id in version_ids
+ }
+ repre_context_by_id = {}
+ for repre_doc in repre_docs:
+ version_id = repre_doc["parent"]
+ repre_docs_by_version_id[version_id].append(repre_doc)
+
+ version_doc = version_docs_by_id[version_id]
+ repre_context_by_id[repre_doc["_id"]] = {
+ "representation": repre_doc,
+ "version": version_doc,
+ "subset": subset_docs_by_id[version_doc["parent"]]
+ }
+ return repre_context_by_id, repre_docs_by_version_id
+
+ def on_project_change(self, project_name):
+ """
+ Called on each project change in parent widget.
+
+ Checks if Sync Server is enabled for a project, pushes changes to
+ model.
+ """
+ enabled = False
+ if project_name:
+ self.model.reset_sync_server(project_name)
+ if self.model.sync_server:
+ enabled_proj = self.model.sync_server.get_enabled_projects()
+ enabled = project_name in enabled_proj
+
+ lib.change_visibility(self.model, self.view, "repre_info", enabled)
+
+ def on_context_menu(self, point):
+ """Shows menu with loader actions on Right-click.
+
+ Registered actions are filtered by selection and help of
+ `loaders_from_representation` from avalon api. Intersection of actions
+ is shown when more subset is selected. When there are not available
+ actions for selected subsets then special action is shown (works as
+ info message to user): "*No compatible loaders for your selection"
+
+ """
+
+ point_index = self.view.indexAt(point)
+ if not point_index.isValid():
+ return
+
+ # Get selected subsets without groups
+ selection = self.view.selectionModel()
+ rows = selection.selectedRows(column=0)
+
+ items = lib.get_selected_items(rows, self.model.ItemRole)
+
+ # Get all representation->loader combinations available for the
+        # index under the cursor, so we can list the options to the user.
+ available_loaders = api.discover(api.Loader)
+ if self.tool_name:
+ available_loaders = lib.remove_tool_name_from_loaders(
+ available_loaders, self.tool_name
+ )
+
+ repre_loaders = []
+ subset_loaders = []
+ for loader in available_loaders:
+            # Skip if it's a SubsetLoader.
+ if api.SubsetLoader in inspect.getmro(loader):
+ subset_loaders.append(loader)
+ else:
+ repre_loaders.append(loader)
+
+ loaders = list()
+
+        # Bool whether only a single subset is selected
+ one_item_selected = (len(items) == 1)
+
+ # Prepare variables for multiple selected subsets
+ first_loaders = []
+ found_combinations = None
+
+ is_first = True
+ repre_context_by_id, repre_docs_by_version_id = (
+ self._repre_contexts_for_loaders_filter(items)
+ )
+ for item in items:
+ _found_combinations = []
+ version_id = item["version_document"]["_id"]
+ repre_docs = repre_docs_by_version_id[version_id]
+ for repre_doc in repre_docs:
+ repre_context = repre_context_by_id[repre_doc["_id"]]
+ for loader in pipeline.loaders_from_repre_context(
+ repre_loaders,
+ repre_context
+ ):
+                    # Do not allow downloading the whole repre here;
+                    # a specific repre must be selected
+ if tools_lib.is_sync_loader(loader):
+ continue
+
+ # skip multiple select variant if one is selected
+ if one_item_selected:
+ loaders.append((repre_doc, loader))
+ continue
+
+ # store loaders of first subset
+ if is_first:
+ first_loaders.append((repre_doc, loader))
+
+ # store combinations to compare with other subsets
+ _found_combinations.append(
+ (repre_doc["name"].lower(), loader)
+ )
+
+ # skip multiple select variant if one is selected
+ if one_item_selected:
+ continue
+
+ is_first = False
+ # Store first combinations to compare
+ if found_combinations is None:
+ found_combinations = _found_combinations
+ # Intersect found combinations with all previous subsets
+ else:
+ found_combinations = list(
+ set(found_combinations) & set(_found_combinations)
+ )
+
+ if not one_item_selected:
+ # Filter loaders from first subset by intersected combinations
+ for repre, loader in first_loaders:
+ if (repre["name"], loader) not in found_combinations:
+ continue
+
+ loaders.append((repre, loader))
+
+ # Subset Loaders.
+ for loader in subset_loaders:
+ loaders.append((None, loader))
+
+ loaders = lib.sort_loaders(loaders)
+
+ # Prepare menu content based on selected items
+ menu = OptionalMenu(self)
+ if not loaders:
+ action = lib.get_no_loader_action(menu, one_item_selected)
+ menu.addAction(action)
+ else:
+ repre_contexts = pipeline.get_repres_contexts(
+ repre_context_by_id.keys(), self.dbcon)
+
+ menu = lib.add_representation_loaders_to_menu(
+ loaders, menu, repre_contexts)
+
+ # Show the context action menu
+ global_point = self.view.mapToGlobal(point)
+ action = menu.exec_(global_point)
+ if not action or not action.data():
+ return
+
+ # Find the representation name and loader to trigger
+ action_representation, loader = action.data()
+
+ self.load_started.emit()
+
+ if api.SubsetLoader in inspect.getmro(loader):
+ subset_ids = []
+ subset_version_docs = {}
+ for item in items:
+ subset_id = item["version_document"]["parent"]
+ subset_ids.append(subset_id)
+ subset_version_docs[subset_id] = item["version_document"]
+
+ # get contexts only for selected menu option
+ subset_contexts_by_id = pipeline.get_subset_contexts(subset_ids,
+ self.dbcon)
+ subset_contexts = list(subset_contexts_by_id.values())
+ options = lib.get_options(action, loader, self, subset_contexts)
+
+ error_info = _load_subsets_by_loader(
+ loader, subset_contexts, options, subset_version_docs
+ )
+
+ else:
+ representation_name = action_representation["name"]
+
+ # Run the loader for all selected indices, for those that have the
+ # same representation available
+
+ # Trigger
+ repre_ids = []
+ for item in items:
+ representation = self.dbcon.find_one(
+ {
+ "type": "representation",
+ "name": representation_name,
+ "parent": item["version_document"]["_id"]
+ },
+ {"_id": 1}
+ )
+ if not representation:
+ self.echo("Subset '{}' has no representation '{}'".format(
+ item["subset"], representation_name
+ ))
+ continue
+ repre_ids.append(representation["_id"])
+
+ # get contexts only for selected menu option
+ repre_contexts = pipeline.get_repres_contexts(repre_ids,
+ self.dbcon)
+ options = lib.get_options(action, loader, self,
+ list(repre_contexts.values()))
+
+ error_info = _load_representations_by_loader(
+ loader, repre_contexts, options=options
+ )
+
+ self.load_ended.emit()
+
+ if error_info:
+ box = LoadErrorMessageBox(error_info)
+ box.show()
+
+ def selected_subsets(self, _groups=False, _merged=False, _other=True):
+ selection = self.view.selectionModel()
+ rows = selection.selectedRows(column=0)
+
+ subsets = list()
+ if not any([_groups, _merged, _other]):
+ self.echo((
+ "This is a BUG: Selected_subsets args must contain"
+ " at least one value set to True"
+ ))
+ return subsets
+
+ for row in rows:
+ item = row.data(self.model.ItemRole)
+ if item.get("isGroup"):
+ if not _groups:
+ continue
+
+ elif item.get("isMerged"):
+ if not _merged:
+ continue
+ else:
+ if not _other:
+ continue
+
+ subsets.append(item)
+
+ return subsets
+
+ def group_subsets(self, name, asset_ids, items):
+ field = "data.subsetGroup"
+
+ if name:
+ update = {"$set": {field: name}}
+ self.echo("Group subsets to '%s'.." % name)
+ else:
+ update = {"$unset": {field: ""}}
+ self.echo("Ungroup subsets..")
+
+ subsets = list()
+ for item in items:
+ subsets.append(item["subset"])
+
+ for asset_id in asset_ids:
+ filtr = {
+ "type": "subset",
+ "parent": asset_id,
+ "name": {"$in": subsets},
+ }
+ self.dbcon.update_many(filtr, update)
+
+ def echo(self, message):
+ print(message)
+
+
+class VersionTextEdit(QtWidgets.QTextEdit):
+ """QTextEdit that displays version specific information.
+
+ This also overrides the context menu to add actions like copying
+ source path to clipboard or copying the raw data of the version
+ to clipboard.
+
+ """
+ def __init__(self, dbcon, parent=None):
+ super(VersionTextEdit, self).__init__(parent=parent)
+ self.dbcon = dbcon
+
+ self.data = {
+ "source": None,
+ "raw": None
+ }
+
+ # Reset
+ self.set_version(None)
+
+ def set_version(self, version_doc=None, version_id=None):
+ # TODO expect only filling data (do not query them here!)
+ if not version_doc and not version_id:
+ # Reset state to empty
+ self.data = {
+ "source": None,
+ "raw": None,
+ }
+ self.setText("")
+ self.setEnabled(True)
+ return
+
+ self.setEnabled(True)
+
+ print("Querying..")
+
+ if not version_doc:
+ version_doc = self.dbcon.find_one({
+ "_id": version_id,
+ "type": {"$in": ["version", "hero_version"]}
+ })
+ assert version_doc, "Not a valid version id"
+
+ if version_doc["type"] == "hero_version":
+ _version_doc = self.dbcon.find_one({
+ "_id": version_doc["version_id"],
+ "type": "version"
+ })
+ version_doc["data"] = _version_doc["data"]
+ version_doc["name"] = HeroVersionType(
+ _version_doc["name"]
+ )
+
+ subset = self.dbcon.find_one({
+ "_id": version_doc["parent"],
+ "type": "subset"
+ })
+ assert subset, "No valid subset parent for version"
+
+ # Define readable creation timestamp
+ created = version_doc["data"]["time"]
+ created = datetime.datetime.strptime(created, "%Y%m%dT%H%M%SZ")
+ created = datetime.datetime.strftime(created, "%b %d %Y %H:%M")
+
+ comment = version_doc["data"].get("comment", None) or "No comment"
+
+ source = version_doc["data"].get("source", None)
+ source_label = source if source else "No source"
+
+ # Store source and raw data
+ self.data["source"] = source
+ self.data["raw"] = version_doc
+
+ if version_doc["type"] == "hero_version":
+ version_name = "hero"
+ else:
+ version_name = tools_lib.format_version(version_doc["name"])
+
+ data = {
+ "subset": subset["name"],
+ "version": version_name,
+ "comment": comment,
+ "created": created,
+ "source": source_label
+ }
+
+        self.setHtml((
+            "<h2>{subset}</h2>"
+            "<h3>{version}</h3>"
+            "<b>Comment</b><br>"
+            "{comment}<br><br>"
+
+            "<b>Created</b><br>"
+            "{created}<br><br>"
+
+            "<b>Source</b><br>"
+            "{source}"
+        ).format(**data))
+
+ def contextMenuEvent(self, event):
+ """Context menu with additional actions"""
+ menu = self.createStandardContextMenu()
+
+        # Add additional actions only when there is any text, so we can
+        # assume a version is set.
+ if self.toPlainText().strip():
+
+ menu.addSeparator()
+ action = QtWidgets.QAction("Copy source path to clipboard",
+ menu)
+ action.triggered.connect(self.on_copy_source)
+ menu.addAction(action)
+
+ action = QtWidgets.QAction("Copy raw data to clipboard",
+ menu)
+ action.triggered.connect(self.on_copy_raw)
+ menu.addAction(action)
+
+ menu.exec_(event.globalPos())
+ del menu
+
+ def on_copy_source(self):
+ """Copy formatted source path to clipboard"""
+ source = self.data.get("source", None)
+ if not source:
+ return
+
+ path = source.format(root=api.registered_root())
+ clipboard = QtWidgets.QApplication.clipboard()
+ clipboard.setText(path)
+
+ def on_copy_raw(self):
+ """Copy raw version data to clipboard
+
+ The data is string formatted with `pprint.pformat`.
+
+ """
+ raw = self.data.get("raw", None)
+ if not raw:
+ return
+
+ raw_text = pprint.pformat(raw)
+ clipboard = QtWidgets.QApplication.clipboard()
+ clipboard.setText(raw_text)
+
+
+class ThumbnailWidget(QtWidgets.QLabel):
+
+ aspect_ratio = (16, 9)
+ max_width = 300
+
+ def __init__(self, dbcon, parent=None):
+ super(ThumbnailWidget, self).__init__(parent)
+ self.dbcon = dbcon
+
+ self.current_thumb_id = None
+ self.current_thumbnail = None
+
+ self.setAlignment(QtCore.Qt.AlignCenter)
+
+ # TODO get res path much better way
+ default_pix_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ "images",
+ "default_thumbnail.png"
+ )
+ self.default_pix = QtGui.QPixmap(default_pix_path)
+
+ def height(self):
+ width = self.width()
+ asp_w, asp_h = self.aspect_ratio
+
+ return (width / asp_w) * asp_h
+
+ def width(self):
+ width = super(ThumbnailWidget, self).width()
+ if width > self.max_width:
+ width = self.max_width
+ return width
+
+ def set_pixmap(self, pixmap=None):
+ if not pixmap:
+ pixmap = self.default_pix
+ self.current_thumb_id = None
+
+ self.current_thumbnail = pixmap
+
+ pixmap = self.scale_pixmap(pixmap)
+ self.setPixmap(pixmap)
+
+ def resizeEvent(self, _event):
+ if not self.current_thumbnail:
+ return
+ cur_pix = self.scale_pixmap(self.current_thumbnail)
+ self.setPixmap(cur_pix)
+
+ def scale_pixmap(self, pixmap):
+ return pixmap.scaled(
+ self.width(), self.height(), QtCore.Qt.KeepAspectRatio
+ )
+
+ def set_thumbnail(self, entity=None):
+ if not entity:
+ self.set_pixmap()
+ return
+
+ if isinstance(entity, (list, tuple)):
+ if len(entity) == 1:
+ entity = entity[0]
+ else:
+ self.set_pixmap()
+ return
+
+ thumbnail_id = entity.get("data", {}).get("thumbnail_id")
+ if thumbnail_id == self.current_thumb_id:
+ if self.current_thumbnail is None:
+ self.set_pixmap()
+ return
+
+ self.current_thumb_id = thumbnail_id
+ if not thumbnail_id:
+ self.set_pixmap()
+ return
+
+ thumbnail_ent = self.dbcon.find_one(
+ {"type": "thumbnail", "_id": thumbnail_id}
+ )
+ if not thumbnail_ent:
+ return
+
+ thumbnail_bin = pipeline.get_thumbnail_binary(
+ thumbnail_ent, "thumbnail", self.dbcon
+ )
+ if not thumbnail_bin:
+ self.set_pixmap()
+ return
+
+ thumbnail = QtGui.QPixmap()
+ thumbnail.loadFromData(thumbnail_bin)
+
+ self.set_pixmap(thumbnail)
+
+
+class VersionWidget(QtWidgets.QWidget):
+ """A Widget that display information about a specific version"""
+ def __init__(self, dbcon, parent=None):
+ super(VersionWidget, self).__init__(parent=parent)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.setContentsMargins(0, 0, 0, 0)
+ label = QtWidgets.QLabel("Version", self)
+ data = VersionTextEdit(dbcon, self)
+ data.setReadOnly(True)
+
+ layout.addWidget(label)
+ layout.addWidget(data)
+
+ self.data = data
+
+ def set_version(self, version_doc):
+ self.data.set_version(version_doc)
+
+
+class FamilyModel(QtGui.QStandardItemModel):
+ def __init__(self, dbcon, family_config_cache):
+ super(FamilyModel, self).__init__()
+
+ self.dbcon = dbcon
+ self.family_config_cache = family_config_cache
+
+ self._items_by_family = {}
+
+ def refresh(self):
+ families = set()
+ if self.dbcon.Session.get("AVALON_PROJECT"):
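+            # Collect the distinct first family of each subset document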
+ result = list(self.dbcon.aggregate([
+ {"$match": {
+ "type": "subset"
+ }},
+ {"$project": {
+ "family": {"$arrayElemAt": ["$data.families", 0]}
+ }},
+ {"$group": {
+ "_id": "family_group",
+ "families": {"$addToSet": "$family"}
+ }}
+ ]))
+ if result:
+ families = set(result[0]["families"])
+
+ root_item = self.invisibleRootItem()
+
+ for family in tuple(self._items_by_family.keys()):
+ if family not in families:
+ item = self._items_by_family.pop(family)
+ root_item.removeRow(item.row())
+
+ self.family_config_cache.refresh()
+
+ new_items = []
+ for family in families:
+ family_config = self.family_config_cache.family_config(family)
+ label = family_config.get("label", family)
+ icon = family_config.get("icon", None)
+
+ if family_config.get("state", True):
+ state = QtCore.Qt.Checked
+ else:
+ state = QtCore.Qt.Unchecked
+
+ if family not in self._items_by_family:
+ item = QtGui.QStandardItem(label)
+ item.setFlags(
+ QtCore.Qt.ItemIsEnabled
+ | QtCore.Qt.ItemIsSelectable
+ | QtCore.Qt.ItemIsUserCheckable
+ )
+ new_items.append(item)
+ self._items_by_family[family] = item
+
+ else:
+                item = self._items_by_family[family]
+ item.setData(label, QtCore.Qt.DisplayRole)
+
+ item.setCheckState(state)
+
+ if icon:
+ item.setIcon(icon)
+
+ if new_items:
+ root_item.appendRows(new_items)
+
+
+class FamilyProxyFiler(QtCore.QSortFilterProxyModel):
+ def __init__(self, *args, **kwargs):
+ super(FamilyProxyFiler, self).__init__(*args, **kwargs)
+
+ self._filtering_enabled = False
+ self._enabled_families = set()
+
+ def set_enabled_families(self, families):
+ if self._enabled_families == families:
+ return
+
+ self._enabled_families = families
+ if self._filtering_enabled:
+ self.invalidateFilter()
+
+ def is_filter_enabled(self):
+ return self._filtering_enabled
+
+ def set_filter_enabled(self, enabled=None):
+ if enabled is None:
+ enabled = not self._filtering_enabled
+ elif self._filtering_enabled == enabled:
+ return
+
+ self._filtering_enabled = enabled
+ self.invalidateFilter()
+
+ def filterAcceptsRow(self, row, parent):
+ if not self._filtering_enabled:
+ return True
+
+ if not self._enabled_families:
+ return False
+
+ index = self.sourceModel().index(row, self.filterKeyColumn(), parent)
+ if index.data(QtCore.Qt.DisplayRole) in self._enabled_families:
+ return True
+ return False
+
+
+class FamilyListView(QtWidgets.QListView):
+ active_changed = QtCore.Signal(list)
+
+ def __init__(self, dbcon, family_config_cache, parent=None):
+ super(FamilyListView, self).__init__(parent=parent)
+
+ self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
+ self.setAlternatingRowColors(True)
+ self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+
+ family_model = FamilyModel(dbcon, family_config_cache)
+ proxy_model = FamilyProxyFiler()
+ proxy_model.setDynamicSortFilter(True)
+ proxy_model.setSourceModel(family_model)
+
+ self.setModel(proxy_model)
+
+ family_model.dataChanged.connect(self._on_data_change)
+ self.customContextMenuRequested.connect(self._on_context_menu)
+
+ self._family_model = family_model
+ self._proxy_model = proxy_model
+
+ def set_enabled_families(self, families):
+ self._proxy_model.set_enabled_families(families)
+
+ self.set_enabled_family_filtering(True)
+
+ def set_enabled_family_filtering(self, enabled=None):
+ self._proxy_model.set_filter_enabled(enabled)
+
+ def refresh(self):
+ self._family_model.refresh()
+
+ self.active_changed.emit(self.get_enabled_families())
+
+ def get_enabled_families(self):
+ """Return the checked family items"""
+ model = self._family_model
+ checked_families = []
+ for row in range(model.rowCount()):
+ index = model.index(row, 0)
+ if index.data(QtCore.Qt.CheckStateRole) == QtCore.Qt.Checked:
+ family = index.data(QtCore.Qt.DisplayRole)
+ checked_families.append(family)
+
+ return checked_families
+
+ def set_all_unchecked(self):
+ self._set_checkstates(False, self._get_all_indexes())
+
+ def set_all_checked(self):
+ self._set_checkstates(True, self._get_all_indexes())
+
+ def _get_all_indexes(self):
+ indexes = []
+ model = self._family_model
+ for row in range(model.rowCount()):
+ index = model.index(row, 0)
+ indexes.append(index)
+ return indexes
+
+ def _set_checkstates(self, checked, indexes):
+ if not indexes:
+ return
+
+ if checked is None:
+ state = None
+ elif checked:
+ state = QtCore.Qt.Checked
+ else:
+ state = QtCore.Qt.Unchecked
+
+ self.blockSignals(True)
+
+ for index in indexes:
+ index_state = index.data(QtCore.Qt.CheckStateRole)
+ if index_state == state:
+ continue
+
+ new_state = state
+ if new_state is None:
+ if index_state == QtCore.Qt.Checked:
+ new_state = QtCore.Qt.Unchecked
+ else:
+ new_state = QtCore.Qt.Checked
+
+ index.model().setData(index, new_state, QtCore.Qt.CheckStateRole)
+
+ self.blockSignals(False)
+
+ self.active_changed.emit(self.get_enabled_families())
+
+ def _change_selection_state(self, checked):
+ indexes = self.selectionModel().selectedIndexes()
+ self._set_checkstates(checked, indexes)
+
+ def _on_data_change(self, *_args):
+ self.active_changed.emit(self.get_enabled_families())
+
+ def _on_context_menu(self, pos):
+ """Build RMB menu under mouse at current position (within widget)"""
+ menu = QtWidgets.QMenu(self)
+
+ # Add enable all action
+ action_check_all = QtWidgets.QAction(menu)
+ action_check_all.setText("Enable All")
+ action_check_all.triggered.connect(self.set_all_checked)
+ # Add disable all action
+ action_uncheck_all = QtWidgets.QAction(menu)
+ action_uncheck_all.setText("Disable All")
+ action_uncheck_all.triggered.connect(self.set_all_unchecked)
+
+ menu.addAction(action_check_all)
+ menu.addAction(action_uncheck_all)
+
+ # Get mouse position
+ global_pos = self.viewport().mapToGlobal(pos)
+ menu.exec_(global_pos)
+
+ def event(self, event):
+ if not event.type() == QtCore.QEvent.KeyPress:
+ pass
+
+ elif event.key() == QtCore.Qt.Key_Space:
+ self._change_selection_state(None)
+ return True
+
+ elif event.key() == QtCore.Qt.Key_Backspace:
+ self._change_selection_state(False)
+ return True
+
+ elif event.key() == QtCore.Qt.Key_Return:
+ self._change_selection_state(True)
+ return True
+
+ return super(FamilyListView, self).event(event)
+
+
+class RepresentationWidget(QtWidgets.QWidget):
+ load_started = QtCore.Signal()
+ load_ended = QtCore.Signal()
+
+ default_widths = (
+ ("name", 120),
+ ("subset", 125),
+ ("asset", 125),
+ ("active_site", 85),
+ ("remote_site", 85)
+ )
+
+ commands = {'active': 'Download', 'remote': 'Upload'}
+
+ def __init__(self, dbcon, tool_name=None, parent=None):
+ super(RepresentationWidget, self).__init__(parent=parent)
+ self.dbcon = dbcon
+ self.tool_name = tool_name
+
+ headers = [item[0] for item in self.default_widths]
+
+ model = RepresentationModel(self.dbcon, headers, [])
+
+ proxy_model = RepresentationSortProxyModel(self)
+ proxy_model.setSourceModel(model)
+
+ label = QtWidgets.QLabel("Representations", self)
+
+ tree_view = DeselectableTreeView()
+ tree_view.setModel(proxy_model)
+ tree_view.setAllColumnsShowFocus(True)
+ tree_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+ tree_view.setSelectionMode(
+ QtWidgets.QAbstractItemView.ExtendedSelection)
+ tree_view.setSortingEnabled(True)
+ tree_view.sortByColumn(1, QtCore.Qt.AscendingOrder)
+ tree_view.setAlternatingRowColors(True)
+ tree_view.setIndentation(20)
+ tree_view.setStyleSheet("""
+ QTreeView::item{
+ padding: 5px 1px;
+ border: 0px;
+ }
+ """)
+ tree_view.collapseAll()
+
+ for column_name, width in self.default_widths:
+ idx = model.Columns.index(column_name)
+ tree_view.setColumnWidth(idx, width)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.addWidget(label)
+ layout.addWidget(tree_view)
+
+ # self.itemChanged.connect(self._on_item_changed)
+ tree_view.customContextMenuRequested.connect(self.on_context_menu)
+
+ self.tree_view = tree_view
+ self.model = model
+ self.proxy_model = proxy_model
+
+ self.sync_server_enabled = False
+ actual_project = dbcon.Session["AVALON_PROJECT"]
+ self.on_project_change(actual_project)
+
+ self.model.refresh()
+
+ def on_project_change(self, project_name):
+ """
+ Called on each project change in parent widget.
+
+ Checks if Sync Server is enabled for a project, pushes changes to
+ model.
+ """
+ enabled = False
+ if project_name:
+ self.model.reset_sync_server(project_name)
+ if self.model.sync_server:
+ enabled_proj = self.model.sync_server.get_enabled_projects()
+ enabled = project_name in enabled_proj
+
+ self.sync_server_enabled = enabled
+ lib.change_visibility(self.model, self.tree_view,
+ "active_site", enabled)
+ lib.change_visibility(self.model, self.tree_view,
+ "remote_site", enabled)
+
+ def _repre_contexts_for_loaders_filter(self, items):
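+        """Prepare representation contexts for loader filtering.
+
+        Returns:
+            dict: representation id -> dict with "representation",
+                "version" and "subset" documents.
+        """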
+ repre_ids = []
+ for item in items:
+ repre_ids.append(item["_id"])
+
+ repre_docs = list(self.dbcon.find(
+ {
+ "type": "representation",
+ "_id": {"$in": repre_ids}
+ },
+ {
+ "name": 1,
+ "parent": 1
+ }
+ ))
+ version_ids = [
+ repre_doc["parent"]
+ for repre_doc in repre_docs
+ ]
+ version_docs = self.dbcon.find({
+ "_id": {"$in": version_ids}
+ })
+
+ version_docs_by_id = {}
+ version_docs_by_subset_id = collections.defaultdict(list)
+ for version_doc in version_docs:
+ version_id = version_doc["_id"]
+ subset_id = version_doc["parent"]
+ version_docs_by_id[version_id] = version_doc
+ version_docs_by_subset_id[subset_id].append(version_doc)
+
+ subset_docs = list(self.dbcon.find(
+ {
+ "_id": {"$in": list(version_docs_by_subset_id.keys())},
+ "type": "subset"
+ },
+ {
+ "schema": 1,
+ "data.families": 1
+ }
+ ))
+ subset_docs_by_id = {
+ subset_doc["_id"]: subset_doc
+ for subset_doc in subset_docs
+ }
+ repre_context_by_id = {}
+ for repre_doc in repre_docs:
+ version_id = repre_doc["parent"]
+
+ version_doc = version_docs_by_id[version_id]
+ repre_context_by_id[repre_doc["_id"]] = {
+ "representation": repre_doc,
+ "version": version_doc,
+ "subset": subset_docs_by_id[version_doc["parent"]]
+ }
+ return repre_context_by_id
+
+ def on_context_menu(self, point):
+ """Shows menu with loader actions on Right-click.
+
+ Registered actions are filtered by selection and help of
+ `loaders_from_representation` from avalon api. Intersection of actions
+ is shown when more subset is selected. When there are not available
+ actions for selected subsets then special action is shown (works as
+ info message to user): "*No compatible loaders for your selection"
+
+ """
+ point_index = self.tree_view.indexAt(point)
+ if not point_index.isValid():
+ return
+
+ # Get selected subsets without groups
+ selection = self.tree_view.selectionModel()
+ rows = selection.selectedRows(column=0)
+
+ items = lib.get_selected_items(rows, self.model.ItemRole)
+
+ selected_side = self._get_selected_side(point_index, rows)
+
+ # Get all representation->loader combinations available for the
+        # index under the cursor, so we can list the options to the user.
+ available_loaders = api.discover(api.Loader)
+
+ filtered_loaders = []
+ for loader in available_loaders:
+ # Skip subset loaders
+ if api.SubsetLoader in inspect.getmro(loader):
+ continue
+
+ if (
+ tools_lib.is_sync_loader(loader)
+ and not self.sync_server_enabled
+ ):
+ continue
+
+ filtered_loaders.append(loader)
+
+ if self.tool_name:
+ filtered_loaders = lib.remove_tool_name_from_loaders(
+ filtered_loaders, self.tool_name
+ )
+
+ loaders = list()
+ already_added_loaders = set()
+ label_already_in_menu = set()
+
+ repre_context_by_id = (
+ self._repre_contexts_for_loaders_filter(items)
+ )
+
+ for item in items:
+ repre_context = repre_context_by_id[item["_id"]]
+ for loader in pipeline.loaders_from_repre_context(
+ filtered_loaders,
+ repre_context
+ ):
+ if tools_lib.is_sync_loader(loader):
+ both_unavailable = (
+ item["active_site_progress"] <= 0
+ and item["remote_site_progress"] <= 0
+ )
+ if both_unavailable:
+ continue
+
+ for selected_side in self.commands.keys():
+ item = item.copy()
+ item["custom_label"] = None
+ label = None
+ selected_site_progress = item.get(
+ "{}_site_progress".format(selected_side), -1)
+
+ # only remove if actually present
+ if tools_lib.is_remove_site_loader(loader):
+ label = "Remove {}".format(selected_side)
+ if selected_site_progress < 1:
+ continue
+
+ if tools_lib.is_add_site_loader(loader):
+ label = self.commands[selected_side]
+ if selected_site_progress >= 0:
+ label = 'Re-{} {}'.format(label, selected_side)
+
+ if not label:
+ continue
+
+ item["selected_side"] = selected_side
+ item["custom_label"] = label
+
+ if label not in label_already_in_menu:
+ loaders.append((item, loader))
+ already_added_loaders.add(loader)
+ label_already_in_menu.add(label)
+
+ else:
+ item = item.copy()
+ item["custom_label"] = None
+
+ if loader not in already_added_loaders:
+ loaders.append((item, loader))
+ already_added_loaders.add(loader)
+
+ loaders = lib.sort_loaders(loaders)
+
+ menu = OptionalMenu(self)
+ if not loaders:
+ action = lib.get_no_loader_action(menu)
+ menu.addAction(action)
+ else:
+ repre_contexts = pipeline.get_repres_contexts(
+ repre_context_by_id.keys(), self.dbcon)
+ menu = lib.add_representation_loaders_to_menu(loaders, menu,
+ repre_contexts)
+
+ self._process_action(items, menu, point)
+
+ def _process_action(self, items, menu, point):
+ """
+ Show the context action menu and process selected
+
+ Args:
+ items(dict): menu items
+ menu(OptionalMenu)
+ point(PointIndex)
+ """
+ global_point = self.tree_view.mapToGlobal(point)
+ action = menu.exec_(global_point)
+
+ if not action or not action.data():
+ return
+
+ self.load_started.emit()
+
+ # Find the representation name and loader to trigger
+ action_representation, loader = action.data()
+ repre_ids = []
+ data_by_repre_id = {}
+ selected_side = action_representation.get("selected_side")
+
+ for item in items:
+ if tools_lib.is_sync_loader(loader):
+ site_name = "{}_site_name".format(selected_side)
+ data = {
+ "_id": item.get("_id"),
+ "site_name": item.get(site_name),
+ "project_name": self.dbcon.Session["AVALON_PROJECT"]
+ }
+
+ if not data["site_name"]:
+ continue
+
+ data_by_repre_id[data["_id"]] = data
+
+ repre_ids.append(item.get("_id"))
+
+ repre_contexts = pipeline.get_repres_contexts(repre_ids,
+ self.dbcon)
+ options = lib.get_options(action, loader, self,
+ list(repre_contexts.values()))
+
+ errors = _load_representations_by_loader(
+ loader, repre_contexts,
+ options=options, data_by_repre_id=data_by_repre_id)
+
+ self.model.refresh()
+
+ self.load_ended.emit()
+
+ if errors:
+ box = LoadErrorMessageBox(errors)
+ box.show()
+
+ def _get_optional_labels(self, loaders, selected_side):
+ """Each loader could have specific label
+
+ Args:
+ loaders (tuple of dict, dict): (item, loader)
+ selected_side(string): active or remote
+
+ Returns:
+ (dict) {loader: string}
+ """
+ optional_labels = {}
+ if selected_side:
+ if selected_side == 'active':
+ txt = "Localize"
+ else:
+ txt = "Sync to Remote"
+ optional_labels = {loader: txt for _, loader in loaders
+ if tools_lib.is_sync_loader(loader)}
+ return optional_labels
+
+ def _get_selected_side(self, point_index, rows):
+ """Returns active/remote label according to column in 'point_index'"""
+ selected_side = None
+ if self.sync_server_enabled:
+ if rows:
+ source_index = self.proxy_model.mapToSource(point_index)
+ selected_side = self.model.data(source_index,
+ self.model.SiteSideRole)
+ return selected_side
+
+ def set_version_ids(self, version_ids):
+ self.model.set_version_ids(version_ids)
+
+ def _set_download(self):
+ pass
+
+ def change_visibility(self, column_name, visible):
+ """
+ Hides or shows particular 'column_name'.
+
+ "asset" and "subset" columns should be visible only in multiselect
+ """
+ lib.change_visibility(self.model, self.tree_view, column_name, visible)
+
+
+def _load_representations_by_loader(loader, repre_contexts,
+ options,
+ data_by_repre_id=None):
+ """Loops through list of repre_contexts and loads them with one loader
+
+ Args:
+ loader (cls of api.Loader) - not initialized yet
+ repre_contexts (dicts) - full info about selected representations
+ (containing repre_doc, version_doc, subset_doc, project info)
+ options (dict) - qargparse arguments to fill OptionDialog
+ data_by_repre_id (dict) - additional data applicable on top of
+ options to provide dynamic values
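+
+    Returns:
+        list: error info tuples in the form
+            (message, traceback, repre_name, subset_name, version_name),
+            consumed by `LoadErrorMessageBox`; None when the options dialog
+            was cancelled.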
+ """
+ error_info = []
+
+    if options is None:  # do not load when the dialog was cancelled
+ return
+
+ for repre_context in repre_contexts.values():
+ try:
+ if data_by_repre_id:
+ _id = repre_context["representation"]["_id"]
+ data = data_by_repre_id.get(_id)
+ options.update(data)
+ pipeline.load_with_repre_context(
+ loader,
+ repre_context,
+ options=options
+ )
+ except pipeline.IncompatibleLoaderError as exc:
+ print(exc)
+ error_info.append((
+ "Incompatible Loader",
+ None,
+ repre_context["representation"]["name"],
+ repre_context["subset"]["name"],
+ repre_context["version"]["name"]
+ ))
+
+ except Exception as exc:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ formatted_traceback = "".join(traceback.format_exception(
+ exc_type, exc_value, exc_traceback
+ ))
+ error_info.append((
+ str(exc),
+ formatted_traceback,
+ repre_context["representation"]["name"],
+ repre_context["subset"]["name"],
+ repre_context["version"]["name"]
+ ))
+ return error_info
+
+
+def _load_subsets_by_loader(loader, subset_contexts, options,
+ subset_version_docs=None):
+ """
+    Triggers load with a SubsetLoader type of loader.
+
+    Args:
+        loader (SubsetLoader): loader class (not initialized yet)
+        subset_contexts (list): full info about selected subsets
+        options (dict): qargparse arguments to fill OptionDialog
+ subset_version_docs (dict): {subset_id: version_doc}
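+
+    Returns:
+        list: error info tuples in the form
+            (message, traceback, None, subset_name, None), consumed by
+            `LoadErrorMessageBox`; None when the options dialog was cancelled.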
+ """
+ error_info = []
+
+    if options is None:  # do not load when the dialog was cancelled
+ return
+
+ if loader.is_multiple_contexts_compatible:
+ subset_names = []
+ for context in subset_contexts:
+ subset_name = context.get("subset", {}).get("name") or "N/A"
+ subset_names.append(subset_name)
+
+ context["version"] = subset_version_docs[context["subset"]["_id"]]
+ try:
+ pipeline.load_with_subset_contexts(
+ loader,
+ subset_contexts,
+ options=options
+ )
+ except Exception as exc:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ formatted_traceback = "".join(
+ traceback.format_exception(
+ exc_type, exc_value, exc_traceback
+ )
+ )
+ error_info.append((
+ str(exc),
+ formatted_traceback,
+ None,
+ ", ".join(subset_names),
+ None
+ ))
+ else:
+ for subset_context in subset_contexts:
+ subset_name = subset_context.get("subset", {}).get("name") or "N/A"
+
+ version_doc = subset_version_docs[subset_context["subset"]["_id"]]
+ subset_context["version"] = version_doc
+ try:
+ pipeline.load_with_subset_context(
+ loader,
+ subset_context,
+ options=options
+ )
+ except Exception as exc:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+                formatted_traceback = "".join(
+ traceback.format_exception(
+ exc_type, exc_value, exc_traceback
+ )
+ )
+ error_info.append((
+ str(exc),
+ formatted_traceback,
+ None,
+ subset_name,
+ None
+ ))
+
+ return error_info
diff --git a/openpype/tools/project_manager/project_manager/__init__.py b/openpype/tools/project_manager/project_manager/__init__.py
index 49ade4a989..6e44afd841 100644
--- a/openpype/tools/project_manager/project_manager/__init__.py
+++ b/openpype/tools/project_manager/project_manager/__init__.py
@@ -1,9 +1,11 @@
__all__ = (
"IDENTIFIER_ROLE",
+ "PROJECT_NAME_ROLE",
"HierarchyView",
"ProjectModel",
+ "ProjectProxyFilter",
"CreateProjectDialog",
"HierarchyModel",
@@ -20,12 +22,14 @@ __all__ = (
from .constants import (
- IDENTIFIER_ROLE
+ IDENTIFIER_ROLE,
+ PROJECT_NAME_ROLE
)
from .widgets import CreateProjectDialog
from .view import HierarchyView
from .model import (
ProjectModel,
+ ProjectProxyFilter,
HierarchyModel,
HierarchySelectionModel,
diff --git a/openpype/tools/project_manager/project_manager/constants.py b/openpype/tools/project_manager/project_manager/constants.py
index 67dea79e59..7ca4aa9492 100644
--- a/openpype/tools/project_manager/project_manager/constants.py
+++ b/openpype/tools/project_manager/project_manager/constants.py
@@ -17,6 +17,9 @@ ITEM_TYPE_ROLE = QtCore.Qt.UserRole + 5
# Item has opened editor (per column)
EDITOR_OPENED_ROLE = QtCore.Qt.UserRole + 6
+# Role for project model
+PROJECT_NAME_ROLE = QtCore.Qt.UserRole + 7
+
# Allowed symbols for any name
NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
NAME_REGEX = re.compile("^[" + NAME_ALLOWED_SYMBOLS + "]*$")
diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py
index 7ee43a6b61..5b6ed78b50 100644
--- a/openpype/tools/project_manager/project_manager/model.py
+++ b/openpype/tools/project_manager/project_manager/model.py
@@ -9,7 +9,8 @@ from .constants import (
DUPLICATED_ROLE,
HIERARCHY_CHANGE_ABLE_ROLE,
REMOVED_ROLE,
- EDITOR_OPENED_ROLE
+ EDITOR_OPENED_ROLE,
+ PROJECT_NAME_ROLE
)
from .style import ResourceCache
@@ -29,7 +30,7 @@ class ProjectModel(QtGui.QStandardItemModel):
def __init__(self, dbcon, *args, **kwargs):
self.dbcon = dbcon
- self._project_names = set()
+ self._items_by_name = {}
super(ProjectModel, self).__init__(*args, **kwargs)
@@ -37,33 +38,62 @@ class ProjectModel(QtGui.QStandardItemModel):
"""Reload projects."""
self.dbcon.Session["AVALON_PROJECT"] = None
- project_items = []
+ new_project_items = []
- none_project = QtGui.QStandardItem("< Select Project >")
- none_project.setData(None)
- project_items.append(none_project)
+ if None not in self._items_by_name:
+ none_project = QtGui.QStandardItem("< Select Project >")
+ self._items_by_name[None] = none_project
+ new_project_items.append(none_project)
- database = self.dbcon.database
+ project_docs = self.dbcon.projects(
+ projection={"name": 1},
+ only_active=True
+ )
project_names = set()
- for project_name in database.collection_names():
- # Each collection will have exactly one project document
- project_doc = database[project_name].find_one(
- {"type": "project"},
- {"name": 1}
- )
- if not project_doc:
+ for project_doc in project_docs:
+ project_name = project_doc.get("name")
+ if not project_name:
continue
- project_name = project_doc.get("name")
- if project_name:
- project_names.add(project_name)
- project_items.append(QtGui.QStandardItem(project_name))
+ project_names.add(project_name)
+ if project_name not in self._items_by_name:
+ project_item = QtGui.QStandardItem(project_name)
+ project_item.setData(project_name, PROJECT_NAME_ROLE)
- self.clear()
+ self._items_by_name[project_name] = project_item
+ new_project_items.append(project_item)
- self._project_names = project_names
+ root_item = self.invisibleRootItem()
+ for project_name in tuple(self._items_by_name.keys()):
+ if project_name is None or project_name in project_names:
+ continue
+ project_item = self._items_by_name.pop(project_name)
+ root_item.removeRow(project_item.row())
- self.invisibleRootItem().appendRows(project_items)
+ if new_project_items:
+ root_item.appendRows(new_project_items)
+
+
+class ProjectProxyFilter(QtCore.QSortFilterProxyModel):
+ """Filters default project item."""
+ def __init__(self, *args, **kwargs):
+ super(ProjectProxyFilter, self).__init__(*args, **kwargs)
+ self._filter_default = False
+
+ def set_filter_default(self, enabled=True):
+ """Set if filtering of default item is enabled."""
+ if enabled == self._filter_default:
+ return
+ self._filter_default = enabled
+ self.invalidateFilter()
+
+ def filterAcceptsRow(self, row, parent):
+ if not self._filter_default:
+ return True
+
+ model = self.sourceModel()
+ source_index = model.index(row, self.filterKeyColumn(), parent)
+ return source_index.data(PROJECT_NAME_ROLE) is not None
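+
+# Minimal usage sketch (illustrative; the same wiring is done in the project
+# manager window):
+#   proxy = ProjectProxyFilter()
+#   proxy.setSourceModel(ProjectModel(dbcon))
+#   combobox.setModel(proxy)
+#   # hide "< Select Project >" once a real project is selected
+#   proxy.set_filter_default(True)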
class HierarchySelectionModel(QtCore.QItemSelectionModel):
diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py
index 7c71f4b451..a19031ceda 100644
--- a/openpype/tools/project_manager/project_manager/window.py
+++ b/openpype/tools/project_manager/project_manager/window.py
@@ -2,19 +2,26 @@ from Qt import QtWidgets, QtCore, QtGui
from . import (
ProjectModel,
+ ProjectProxyFilter,
HierarchyModel,
HierarchySelectionModel,
HierarchyView,
- CreateProjectDialog
+ CreateProjectDialog,
+ PROJECT_NAME_ROLE
)
-from openpype.style import load_stylesheet
from .style import ResourceCache
+from openpype.style import load_stylesheet
from openpype.lib import is_admin_password_required
from openpype.widgets import PasswordDialog
from openpype import resources
+from openpype.api import (
+ get_project_basic_paths,
+ create_project_folders,
+ Logger
+)
from avalon.api import AvalonMongoDB
@@ -24,12 +31,14 @@ class ProjectManagerWindow(QtWidgets.QWidget):
def __init__(self, parent=None):
super(ProjectManagerWindow, self).__init__(parent)
+ self.log = Logger.get_logger(self.__class__.__name__)
+
self._initial_reset = False
self._password_dialog = None
self._user_passed = False
self.setWindowTitle("OpenPype Project Manager")
- self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath()))
+ self.setWindowIcon(QtGui.QIcon(resources.get_openpype_icon_filepath()))
# Top part of window
top_part_widget = QtWidgets.QWidget(self)
@@ -40,11 +49,15 @@ class ProjectManagerWindow(QtWidgets.QWidget):
dbcon = AvalonMongoDB()
project_model = ProjectModel(dbcon)
+ project_proxy = ProjectProxyFilter()
+ project_proxy.setSourceModel(project_model)
+ project_proxy.setDynamicSortFilter(True)
+
project_combobox = QtWidgets.QComboBox(project_widget)
project_combobox.setSizeAdjustPolicy(
QtWidgets.QComboBox.AdjustToContents
)
- project_combobox.setModel(project_model)
+ project_combobox.setModel(project_proxy)
project_combobox.setRootModelIndex(QtCore.QModelIndex())
style_delegate = QtWidgets.QStyledItemDelegate()
project_combobox.setItemDelegate(style_delegate)
@@ -57,12 +70,19 @@ class ProjectManagerWindow(QtWidgets.QWidget):
create_project_btn = QtWidgets.QPushButton(
"Create project...", project_widget
)
+ create_folders_btn = QtWidgets.QPushButton(
+ ResourceCache.get_icon("asset", "default"),
+ "Create Starting Folders",
+ project_widget
+ )
+ create_folders_btn.setEnabled(False)
project_layout = QtWidgets.QHBoxLayout(project_widget)
project_layout.setContentsMargins(0, 0, 0, 0)
project_layout.addWidget(project_combobox, 0)
project_layout.addWidget(refresh_projects_btn, 0)
project_layout.addWidget(create_project_btn, 0)
+ project_layout.addWidget(create_folders_btn)
project_layout.addStretch(1)
# Helper buttons
@@ -124,12 +144,14 @@ class ProjectManagerWindow(QtWidgets.QWidget):
refresh_projects_btn.clicked.connect(self._on_project_refresh)
create_project_btn.clicked.connect(self._on_project_create)
+ create_folders_btn.clicked.connect(self._on_create_folders)
project_combobox.currentIndexChanged.connect(self._on_project_change)
save_btn.clicked.connect(self._on_save_click)
add_asset_btn.clicked.connect(self._on_add_asset)
add_task_btn.clicked.connect(self._on_add_task)
self._project_model = project_model
+ self._project_proxy_model = project_proxy
self.hierarchy_view = hierarchy_view
self.hierarchy_model = hierarchy_model
@@ -139,6 +161,7 @@ class ProjectManagerWindow(QtWidgets.QWidget):
self._refresh_projects_btn = refresh_projects_btn
self._project_combobox = project_combobox
self._create_project_btn = create_project_btn
+ self._create_folders_btn = create_folders_btn
self._add_asset_btn = add_asset_btn
self._add_task_btn = add_task_btn
@@ -147,8 +170,17 @@ class ProjectManagerWindow(QtWidgets.QWidget):
self.setStyleSheet(load_stylesheet())
def _set_project(self, project_name=None):
+ self._create_folders_btn.setEnabled(project_name is not None)
+ self._project_proxy_model.set_filter_default(project_name is not None)
self.hierarchy_view.set_project(project_name)
+ def _current_project(self):
+ row = self._project_combobox.currentIndex()
+ if row < 0:
+ return None
+ index = self._project_proxy_model.index(row, 0)
+ return index.data(PROJECT_NAME_ROLE)
+
def showEvent(self, event):
super(ProjectManagerWindow, self).showEvent(event)
@@ -167,6 +199,7 @@ class ProjectManagerWindow(QtWidgets.QWidget):
project_name = self._project_combobox.currentText()
self._project_model.refresh()
+ self._project_proxy_model.sort(0, QtCore.Qt.AscendingOrder)
if self._project_combobox.count() == 0:
return self._set_project()
@@ -176,10 +209,12 @@ class ProjectManagerWindow(QtWidgets.QWidget):
if row >= 0:
self._project_combobox.setCurrentIndex(row)
- self._set_project(self._project_combobox.currentText())
+ selected_project = self._current_project()
+ self._set_project(selected_project)
def _on_project_change(self):
- self._set_project(self._project_combobox.currentText())
+ selected_project = self._current_project()
+ self._set_project(selected_project)
def _on_project_refresh(self):
self.refresh_projects()
@@ -193,6 +228,30 @@ class ProjectManagerWindow(QtWidgets.QWidget):
def _on_add_task(self):
self.hierarchy_view.add_task()
+ def _on_create_folders(self):
+ project_name = self._current_project()
+ if not project_name:
+ return
+
+ qm = QtWidgets.QMessageBox
+ ans = qm.question(self,
+ "OpenPype Project Manager",
+ "Confirm to create starting project folders?",
+ qm.Yes | qm.No)
+ if ans == qm.Yes:
+ try:
+ # Get paths based on presets
+ basic_paths = get_project_basic_paths(project_name)
+ if not basic_paths:
+                        return
+ # Invoking OpenPype API to create the project folders
+ create_project_folders(basic_paths, project_name)
+ except Exception as exc:
+ self.log.warning(
+ "Cannot create starting folders: {}".format(exc),
+ exc_info=True
+ )
+
def show_message(self, message):
# TODO add nicer message pop
self.message_label.setText(message)
diff --git a/openpype/tools/settings/local_settings/projects_widget.py b/openpype/tools/settings/local_settings/projects_widget.py
index a48c504d59..9cd3b9a38e 100644
--- a/openpype/tools/settings/local_settings/projects_widget.py
+++ b/openpype/tools/settings/local_settings/projects_widget.py
@@ -809,7 +809,7 @@ class ProjectSettingsWidget(QtWidgets.QWidget):
self.modules_manager = modules_manager
- projects_widget = _ProjectListWidget(self)
+ projects_widget = _ProjectListWidget(self, only_active=True)
roos_site_widget = RootSiteWidget(
modules_manager, project_settings, self
)
diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py
index 8235cf8642..ab6b27bdaf 100644
--- a/openpype/tools/settings/settings/base.py
+++ b/openpype/tools/settings/settings/base.py
@@ -3,6 +3,7 @@ import json
from Qt import QtWidgets, QtGui, QtCore
from openpype.tools.settings import CHILD_OFFSET
from .widgets import ExpandingWidget
+from .lib import create_deffered_value_change_timer
class BaseWidget(QtWidgets.QWidget):
@@ -329,6 +330,20 @@ class BaseWidget(QtWidgets.QWidget):
class InputWidget(BaseWidget):
+ def __init__(self, *args, **kwargs):
+ super(InputWidget, self).__init__(*args, **kwargs)
+
+        # Input widgets always have a timer available (but it may not be used).
+ self._value_change_timer = create_deffered_value_change_timer(
+ self._on_value_change_timer
+ )
+
+ def start_value_timer(self):
+ self._value_change_timer.start()
+
+ def _on_value_change_timer(self):
+ pass
+
def create_ui(self):
if self.entity.use_label_wrap:
label = None
diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py
index c420a8cdc5..be2264340b 100644
--- a/openpype/tools/settings/settings/categories.py
+++ b/openpype/tools/settings/settings/categories.py
@@ -609,14 +609,23 @@ class ProjectWidget(SettingsCategoryWidget):
self.project_list_widget.refresh()
def _on_reset_crash(self):
- self.project_list_widget.setEnabled(False)
+ self._set_enabled_project_list(False)
super(ProjectWidget, self)._on_reset_crash()
def _on_reset_success(self):
- if not self.project_list_widget.isEnabled():
- self.project_list_widget.setEnabled(True)
+ self._set_enabled_project_list(True)
super(ProjectWidget, self)._on_reset_success()
+ def _set_enabled_project_list(self, enabled):
+ if (
+ enabled
+ and self.modify_defaults_checkbox
+ and self.modify_defaults_checkbox.isChecked()
+ ):
+ enabled = False
+ if self.project_list_widget.isEnabled() != enabled:
+ self.project_list_widget.setEnabled(enabled)
+
def _create_root_entity(self):
self.entity = ProjectSettings(change_state=False)
self.entity.on_change_callbacks.append(self._on_entity_change)
@@ -637,7 +646,8 @@ class ProjectWidget(SettingsCategoryWidget):
if self.modify_defaults_checkbox:
self.modify_defaults_checkbox.setEnabled(True)
- self.project_list_widget.setEnabled(True)
+
+ self._set_enabled_project_list(True)
except DefaultsNotDefined:
if not self.modify_defaults_checkbox:
@@ -646,7 +656,7 @@ class ProjectWidget(SettingsCategoryWidget):
self.entity.set_defaults_state()
self.modify_defaults_checkbox.setChecked(True)
self.modify_defaults_checkbox.setEnabled(False)
- self.project_list_widget.setEnabled(False)
+ self._set_enabled_project_list(False)
except StudioDefaultsNotDefined:
self.select_default_project()
@@ -666,8 +676,10 @@ class ProjectWidget(SettingsCategoryWidget):
def _on_modify_defaults(self):
if self.modify_defaults_checkbox.isChecked():
+ self._set_enabled_project_list(False)
if not self.entity.is_in_defaults_state():
self.reset()
else:
+ self._set_enabled_project_list(True)
if not self.entity.is_in_studio_state():
self.reset()
diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py
new file mode 100644
index 0000000000..5c20bf1afe
--- /dev/null
+++ b/openpype/tools/settings/settings/constants.py
@@ -0,0 +1,16 @@
+from Qt import QtCore
+
+
+DEFAULT_PROJECT_LABEL = "< Default >"
+PROJECT_NAME_ROLE = QtCore.Qt.UserRole + 1
+PROJECT_IS_ACTIVE_ROLE = QtCore.Qt.UserRole + 2
+PROJECT_IS_SELECTED_ROLE = QtCore.Qt.UserRole + 3
+
+
+__all__ = (
+ "DEFAULT_PROJECT_LABEL",
+
+ "PROJECT_NAME_ROLE",
+ "PROJECT_IS_ACTIVE_ROLE",
+ "PROJECT_IS_SELECTED_ROLE"
+)
diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py
index ba86fe82dd..cfb9d4a4b1 100644
--- a/openpype/tools/settings/settings/dict_mutable_widget.py
+++ b/openpype/tools/settings/settings/dict_mutable_widget.py
@@ -3,6 +3,7 @@ from uuid import uuid4
from Qt import QtWidgets, QtCore, QtGui
from .base import BaseWidget
+from .lib import create_deffered_value_change_timer
from .widgets import (
ExpandingWidget,
IconButton
@@ -284,6 +285,10 @@ class ModifiableDictItem(QtWidgets.QWidget):
self.confirm_btn = None
+ self._key_change_timer = create_deffered_value_change_timer(
+ self._on_timeout
+ )
+
if collapsible_key:
self.create_collapsible_ui()
else:
@@ -516,6 +521,10 @@ class ModifiableDictItem(QtWidgets.QWidget):
if self.ignore_input_changes:
return
+ self._key_change_timer.start()
+
+ def _on_timeout(self):
+ key = self.key_value()
is_key_duplicated = self.entity_widget.validate_key_duplication(
self.temp_key, key, self
)
diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py
index 736ba77652..a28bee8d36 100644
--- a/openpype/tools/settings/settings/item_widgets.py
+++ b/openpype/tools/settings/settings/item_widgets.py
@@ -400,7 +400,9 @@ class TextWidget(InputWidget):
def _on_value_change(self):
if self.ignore_input_changes:
return
+ self.start_value_timer()
+ def _on_value_change_timer(self):
self.entity.set(self.input_value())
@@ -411,7 +413,8 @@ class NumberWidget(InputWidget):
kwargs = {
"minimum": self.entity.minimum,
"maximum": self.entity.maximum,
- "decimal": self.entity.decimal
+ "decimal": self.entity.decimal,
+ "steps": self.entity.steps
}
self.input_field = NumberSpinBox(self.content_widget, **kwargs)
input_field_stretch = 1
@@ -426,6 +429,10 @@ class NumberWidget(InputWidget):
int(self.entity.minimum * slider_multiplier),
int(self.entity.maximum * slider_multiplier)
)
+ if self.entity.steps is not None:
+ slider_widget.setSingleStep(
+ self.entity.steps * slider_multiplier
+ )
self.content_layout.addWidget(slider_widget, 1)
@@ -469,6 +476,9 @@ class NumberWidget(InputWidget):
if self.ignore_input_changes:
return
+ self.start_value_timer()
+
+ def _on_value_change_timer(self):
value = self.input_field.value()
if self._slider_widget is not None and not self._ignore_input_change:
self._ignore_slider_change = True
@@ -566,7 +576,9 @@ class RawJsonWidget(InputWidget):
def _on_value_change(self):
if self.ignore_input_changes:
return
+ self.start_value_timer()
+ def _on_value_change_timer(self):
self._is_invalid = self.input_field.has_invalid_value()
if not self.is_invalid:
self.entity.set(self.input_field.json_value())
@@ -781,4 +793,7 @@ class PathInputWidget(InputWidget):
def _on_value_change(self):
if self.ignore_input_changes:
return
+ self.start_value_timer()
+
+ def _on_value_change_timer(self):
self.entity.set(self.input_value())
diff --git a/openpype/tools/settings/settings/lib.py b/openpype/tools/settings/settings/lib.py
new file mode 100644
index 0000000000..577aaa5671
--- /dev/null
+++ b/openpype/tools/settings/settings/lib.py
@@ -0,0 +1,18 @@
+from Qt import QtCore
+
+# Offset of value change trigger in ms
+VALUE_CHANGE_OFFSET_MS = 300
+
+
+def create_deffered_value_change_timer(callback):
+ """Deffer value change callback.
+
+ UI won't trigger all callbacks on each value change but after predefined
+ time. Timer is reset on each start so callback is triggered after user
+ finish editing.
+ """
+ timer = QtCore.QTimer()
+ timer.setSingleShot(True)
+ timer.setInterval(VALUE_CHANGE_OFFSET_MS)
+ timer.timeout.connect(callback)
+ return timer
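+
+
+# Minimal usage sketch (illustrative; `line_edit` and `commit_value` are just
+# placeholders for an input widget and its entity setter):
+#
+#   timer = create_deffered_value_change_timer(commit_value)
+#   line_edit.textChanged.connect(lambda _text: timer.start())
+#
+# `commit_value` then runs only once the user stops typing for
+# VALUE_CHANGE_OFFSET_MS milliseconds.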
diff --git a/openpype/tools/settings/settings/style/__init__.py b/openpype/tools/settings/settings/style/__init__.py
index 5a57642ee1..f1d9829a04 100644
--- a/openpype/tools/settings/settings/style/__init__.py
+++ b/openpype/tools/settings/settings/style/__init__.py
@@ -10,4 +10,4 @@ def load_stylesheet():
def app_icon_path():
- return resources.pype_icon_filepath()
+ return resources.get_openpype_icon_filepath()
diff --git a/openpype/tools/settings/settings/style/style.css b/openpype/tools/settings/settings/style/style.css
index d9d85a481e..b77b575204 100644
--- a/openpype/tools/settings/settings/style/style.css
+++ b/openpype/tools/settings/settings/style/style.css
@@ -146,6 +146,15 @@ QSlider::handle:vertical {
border: 1px solid #464b54;
background: #21252B;
}
+
+#ProjectListWidget QListView:disabled {
+ background: #282C34;
+}
+
+#ProjectListWidget QListView::item:disabled {
+ color: #4e5254;
+}
+
#ProjectListWidget QLabel {
background: transparent;
font-weight: bold;
@@ -249,8 +258,6 @@ QTabBar::tab:!selected:hover {
background: #333840;
}
-
-
QTabBar::tab:first:selected {
margin-left: 0;
}
@@ -405,12 +412,15 @@ QHeaderView::section {
font-weight: bold;
}
-QTableView::item:pressed, QListView::item:pressed, QTreeView::item:pressed {
+QAbstractItemView::item:pressed {
background: #78879b;
color: #FFFFFF;
}
-QTableView::item:selected:active, QTreeView::item:selected:active, QListView::item:selected:active {
+QAbstractItemView::item:selected:active {
+ background: #3d8ec9;
+}
+QAbstractItemView::item:selected:!active {
background: #3d8ec9;
}
diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py
index b821c3bb2c..710884e9e5 100644
--- a/openpype/tools/settings/settings/widgets.py
+++ b/openpype/tools/settings/settings/widgets.py
@@ -7,6 +7,12 @@ from avalon.mongodb import (
)
from openpype.settings.lib import get_system_settings
+from .constants import (
+ DEFAULT_PROJECT_LABEL,
+ PROJECT_NAME_ROLE,
+ PROJECT_IS_ACTIVE_ROLE,
+ PROJECT_IS_SELECTED_ROLE
+)
class SettingsLineEdit(QtWidgets.QLineEdit):
@@ -92,11 +98,15 @@ class NumberSpinBox(QtWidgets.QDoubleSpinBox):
min_value = kwargs.pop("minimum", -99999)
max_value = kwargs.pop("maximum", 99999)
decimals = kwargs.pop("decimal", 0)
+ steps = kwargs.pop("steps", None)
+
super(NumberSpinBox, self).__init__(*args, **kwargs)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
self.setDecimals(decimals)
self.setMinimum(min_value)
self.setMaximum(max_value)
+ if steps is not None:
+ self.setSingleStep(steps)
def focusInEvent(self, event):
super(NumberSpinBox, self).focusInEvent(event)
@@ -598,6 +608,65 @@ class NiceCheckbox(QtWidgets.QFrame):
return super(NiceCheckbox, self).mouseReleaseEvent(event)
+class ProjectModel(QtGui.QStandardItemModel):
+ def __init__(self, only_active, *args, **kwargs):
+ super(ProjectModel, self).__init__(*args, **kwargs)
+
+ self.dbcon = None
+
+ self._only_active = only_active
+ self._default_item = None
+ self._items_by_name = {}
+
+ def set_dbcon(self, dbcon):
+ self.dbcon = dbcon
+
+ def refresh(self):
+ new_items = []
+ if self._default_item is None:
+ item = QtGui.QStandardItem(DEFAULT_PROJECT_LABEL)
+ item.setData(None, PROJECT_NAME_ROLE)
+ item.setData(True, PROJECT_IS_ACTIVE_ROLE)
+ item.setData(False, PROJECT_IS_SELECTED_ROLE)
+ new_items.append(item)
+ self._default_item = item
+
+ project_names = set()
+ if self.dbcon is not None:
+ for project_doc in self.dbcon.projects(
+ projection={"name": 1, "data.active": 1},
+ only_active=self._only_active
+ ):
+ project_name = project_doc["name"]
+ project_names.add(project_name)
+ if project_name in self._items_by_name:
+ item = self._items_by_name[project_name]
+ else:
+ item = QtGui.QStandardItem(project_name)
+
+ self._items_by_name[project_name] = item
+ new_items.append(item)
+
+ is_active = project_doc.get("data", {}).get("active", True)
+ item.setData(project_name, PROJECT_NAME_ROLE)
+ item.setData(is_active, PROJECT_IS_ACTIVE_ROLE)
+ item.setData(False, PROJECT_IS_SELECTED_ROLE)
+
+ if not is_active:
+ font = item.font()
+ font.setItalic(True)
+ item.setFont(font)
+
+ root_item = self.invisibleRootItem()
+ for project_name in tuple(self._items_by_name.keys()):
+ if project_name not in project_names:
+ item = self._items_by_name.pop(project_name)
+ root_item.removeRow(item.row())
+
+ if new_items:
+ root_item.appendRows(new_items)
+
+
class ProjectListView(QtWidgets.QListView):
left_mouse_released_at = QtCore.Signal(QtCore.QModelIndex)
@@ -608,11 +677,51 @@ class ProjectListView(QtWidgets.QListView):
super(ProjectListView, self).mouseReleaseEvent(event)
+class ProjectSortFilterProxy(QtCore.QSortFilterProxyModel):
+ def __init__(self, *args, **kwargs):
+ super(ProjectSortFilterProxy, self).__init__(*args, **kwargs)
+ self._enable_filter = True
+
+ def lessThan(self, left_index, right_index):
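+        # Sort the "< Default >" item first, then active projects before
+        # inactive ones; items within the same group keep the default
+        # ordering by display name.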
+ if left_index.data(PROJECT_NAME_ROLE) is None:
+ return True
+
+ if right_index.data(PROJECT_NAME_ROLE) is None:
+ return False
+
+ left_is_active = left_index.data(PROJECT_IS_ACTIVE_ROLE)
+ right_is_active = right_index.data(PROJECT_IS_ACTIVE_ROLE)
+ if right_is_active == left_is_active:
+ return super(ProjectSortFilterProxy, self).lessThan(
+ left_index, right_index
+ )
+
+ if left_is_active:
+ return True
+ return False
+
+ def filterAcceptsRow(self, source_row, source_parent):
+ if not self._enable_filter:
+ return True
+
+ index = self.sourceModel().index(source_row, 0, source_parent)
+ is_active = bool(index.data(self.filterRole()))
+ is_selected = bool(index.data(PROJECT_IS_SELECTED_ROLE))
+
+ return is_active or is_selected
+
+ def is_filter_enabled(self):
+ return self._enable_filter
+
+ def set_filter_enabled(self, value):
+ self._enable_filter = value
+ self.invalidateFilter()
+
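+# Usage sketch (illustrative; mirrors the wiring in ProjectListWidget below):
+#   proxy = ProjectSortFilterProxy()
+#   proxy.setFilterRole(PROJECT_IS_ACTIVE_ROLE)
+#   proxy.setSourceModel(ProjectModel(only_active=False))
+#   proxy.set_filter_enabled(False)  # show inactive projects too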
+
class ProjectListWidget(QtWidgets.QWidget):
- default = "< Default >"
project_changed = QtCore.Signal()
- def __init__(self, parent):
+ def __init__(self, parent, only_active=False):
self._parent = parent
self.current_project = None
@@ -621,8 +730,14 @@ class ProjectListWidget(QtWidgets.QWidget):
self.setObjectName("ProjectListWidget")
label_widget = QtWidgets.QLabel("Projects")
+
project_list = ProjectListView(self)
- project_list.setModel(QtGui.QStandardItemModel())
+ project_model = ProjectModel(only_active)
+ project_proxy = ProjectSortFilterProxy()
+
+ project_proxy.setFilterRole(PROJECT_IS_ACTIVE_ROLE)
+ project_proxy.setSourceModel(project_model)
+ project_list.setModel(project_proxy)
# Do not allow editing
project_list.setEditTriggers(
@@ -636,9 +751,26 @@ class ProjectListWidget(QtWidgets.QWidget):
layout.addWidget(label_widget, 0)
layout.addWidget(project_list, 1)
+ if only_active:
+ inactive_chk = None
+ else:
+ inactive_chk = QtWidgets.QCheckBox(" Show Inactive Projects ")
+ inactive_chk.setChecked(not project_proxy.is_filter_enabled())
+
+ layout.addSpacing(5)
+ layout.addWidget(inactive_chk, 0)
+ layout.addSpacing(5)
+
+ inactive_chk.stateChanged.connect(self.on_inactive_vis_changed)
+
project_list.left_mouse_released_at.connect(self.on_item_clicked)
+ self._default_project_item = None
+
self.project_list = project_list
+ self.project_proxy = project_proxy
+ self.project_model = project_model
+ self.inactive_chk = inactive_chk
self.dbcon = None
@@ -675,24 +807,38 @@ class ProjectListWidget(QtWidgets.QWidget):
else:
self.select_project(self.current_project)
+ def on_inactive_vis_changed(self):
+ if self.inactive_chk is None:
+            # Should not happen - the slot is only connected when the
+            # checkbox exists.
+ return
+
+ enable_filter = not self.inactive_chk.isChecked()
+ self.project_proxy.set_filter_enabled(enable_filter)
+
def validate_context_change(self):
return not self._parent.entity.has_unsaved_changes
def project_name(self):
- if self.current_project == self.default:
+ if self.current_project == DEFAULT_PROJECT_LABEL:
return None
return self.current_project
def select_default_project(self):
- self.select_project(self.default)
+ self.select_project(DEFAULT_PROJECT_LABEL)
def select_project(self, project_name):
- model = self.project_list.model()
+ model = self.project_model
+ proxy = self.project_proxy
+
found_items = model.findItems(project_name)
if not found_items:
- found_items = model.findItems(self.default)
+ found_items = model.findItems(DEFAULT_PROJECT_LABEL)
index = model.indexFromItem(found_items[0])
+ model.setData(index, True, PROJECT_IS_SELECTED_ROLE)
+
+ index = proxy.mapFromSource(index)
+
self.project_list.selectionModel().clear()
self.project_list.selectionModel().setCurrentIndex(
index, QtCore.QItemSelectionModel.SelectionFlag.SelectCurrent
@@ -704,11 +850,6 @@ class ProjectListWidget(QtWidgets.QWidget):
selected_project = index.data(QtCore.Qt.DisplayRole)
break
- model = self.project_list.model()
- model.clear()
-
- items = [self.default]
-
mongo_url = os.environ["OPENPYPE_MONGO"]
# Force uninstall of whole avalon connection if url does not match
@@ -726,17 +867,10 @@ class ProjectListWidget(QtWidgets.QWidget):
self.dbcon = None
self.current_project = None
- if self.dbcon:
- database = self.dbcon.database
- for project_name in database.collection_names():
- project_doc = database[project_name].find_one(
- {"type": "project"},
- {"name": 1}
- )
- if project_doc:
- items.append(project_doc["name"])
- for item in items:
- model.appendRow(QtGui.QStandardItem(item))
+ self.project_model.set_dbcon(self.dbcon)
+ self.project_model.refresh()
+
+ self.project_proxy.sort(0)
self.select_project(selected_project)
diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py
index 81a53c52b8..2ce757f773 100644
--- a/openpype/tools/standalonepublish/app.py
+++ b/openpype/tools/standalonepublish/app.py
@@ -231,7 +231,7 @@ def main():
qt_app = QtWidgets.QApplication([])
# app.setQuitOnLastWindowClosed(False)
qt_app.setStyleSheet(style.load_stylesheet())
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
qt_app.setWindowIcon(icon)
def signal_handler(sig, frame):
diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py
index c39d71b055..eb22883c11 100644
--- a/openpype/tools/standalonepublish/widgets/widget_asset.py
+++ b/openpype/tools/standalonepublish/widgets/widget_asset.py
@@ -273,8 +273,11 @@ class AssetWidget(QtWidgets.QWidget):
def _set_projects(self):
project_names = list()
- for project in self.dbcon.projects():
- project_name = project.get("name")
+
+ for doc in self.dbcon.projects(projection={"name": 1},
+ only_active=True):
+
+ project_name = doc.get("name")
if project_name:
project_names.append(project_name)
@@ -299,7 +302,9 @@ class AssetWidget(QtWidgets.QWidget):
def on_project_change(self):
projects = list()
- for project in self.dbcon.projects():
+
+ for project in self.dbcon.projects(projection={"name": 1},
+ only_active=True):
projects.append(project['name'])
project_name = self.combo_projects.currentText()
if project_name in projects:
diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py
index 2965463c37..2ca625f307 100644
--- a/openpype/tools/tray/pype_info_widget.py
+++ b/openpype/tools/tray/pype_info_widget.py
@@ -214,7 +214,7 @@ class PypeInfoWidget(QtWidgets.QWidget):
self.setStyleSheet(style.load_stylesheet())
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
self.setWindowIcon(icon)
self.setWindowTitle("OpenPype info")
diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py
index ed66f1a80f..3050e206ce 100644
--- a/openpype/tools/tray/pype_tray.py
+++ b/openpype/tools/tray/pype_tray.py
@@ -17,6 +17,11 @@ from openpype.api import (
from openpype.lib import get_pype_execute_args
from openpype.modules import TrayModulesManager
from openpype import style
+from openpype.settings import (
+ SystemSettings,
+ ProjectSettings,
+ DefaultsNotDefined
+)
from .pype_info_widget import PypeInfoWidget
@@ -114,6 +119,54 @@ class TrayManager:
self.main_thread_timer = main_thread_timer
+ # For storing missing settings dialog
+ self._settings_validation_dialog = None
+
+ self.execute_in_main_thread(self._startup_validations)
+
+ def _startup_validations(self):
+ """Run possible startup validations."""
+ self._validate_settings_defaults()
+
+ def _validate_settings_defaults(self):
+ valid = True
+ try:
+ SystemSettings()
+ ProjectSettings()
+
+ except DefaultsNotDefined:
+ valid = False
+
+ if valid:
+ return
+
+        title = "Settings are missing default values"
+        msg = (
+            "Your OpenPype will not work as expected! \n"
+            "Some default values in settings are missing. \n\n"
+            "Please contact the OpenPype team."
+ )
+ msg_box = QtWidgets.QMessageBox(
+ QtWidgets.QMessageBox.Warning,
+ title,
+ msg,
+ QtWidgets.QMessageBox.Ok,
+ flags=QtCore.Qt.Dialog
+ )
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
+ msg_box.setWindowIcon(icon)
+ msg_box.setStyleSheet(style.load_stylesheet())
+ msg_box.buttonClicked.connect(self._post_validate_settings_defaults)
+
+ self._settings_validation_dialog = msg_box
+
+ msg_box.show()
+
+ def _post_validate_settings_defaults(self):
+ widget = self._settings_validation_dialog
+ self._settings_validation_dialog = None
+ widget.deleteLater()
+
def show_tray_message(self, title, message, icon=None, msecs=None):
"""Show tray message.
@@ -200,7 +253,7 @@ class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
doubleclick_time_ms = 100
def __init__(self, parent):
- icon = QtGui.QIcon(resources.pype_icon_filepath())
+ icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
super(SystemTrayIcon, self).__init__(icon, parent)
@@ -236,6 +289,7 @@ class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
self._click_timer = click_timer
self._doubleclick = False
+ self._click_pos = None
def _click_timer_timeout(self):
self._click_timer.stop()
@@ -248,13 +302,17 @@ class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
self._show_context_menu()
def _show_context_menu(self):
- pos = QtGui.QCursor().pos()
+ pos = self._click_pos
+ self._click_pos = None
+ if pos is None:
+ pos = QtGui.QCursor().pos()
self.contextMenu().popup(pos)
def on_systray_activated(self, reason):
# show contextMenu if left click
if reason == QtWidgets.QSystemTrayIcon.Trigger:
if self.tray_man.doubleclick_callback:
+ self._click_pos = QtGui.QCursor().pos()
self._click_timer.start()
else:
self._show_context_menu()
@@ -308,7 +366,7 @@ class PypeTrayApplication(QtWidgets.QApplication):
splash_widget.hide()
def set_splash(self):
- splash_pix = QtGui.QPixmap(resources.pype_splash_filepath())
+ splash_pix = QtGui.QPixmap(resources.get_openpype_splash_filepath())
splash = QtWidgets.QSplashScreen(splash_pix)
splash.setMask(splash_pix.mask())
splash.setEnabled(False)
diff --git a/openpype/tools/utils/__init__.py b/openpype/tools/utils/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/tools/utils/delegates.py b/openpype/tools/utils/delegates.py
new file mode 100644
index 0000000000..1827bc7e9b
--- /dev/null
+++ b/openpype/tools/utils/delegates.py
@@ -0,0 +1,449 @@
+import time
+from datetime import datetime
+import logging
+import numbers
+
+import Qt
+from Qt import QtWidgets, QtGui, QtCore
+
+from avalon.lib import HeroVersionType
+from .models import (
+ AssetModel,
+ TreeModel
+)
+from . import lib
+
+if Qt.__binding__ == "PySide":
+ from PySide.QtGui import QStyleOptionViewItemV4
+elif Qt.__binding__ == "PyQt4":
+ from PyQt4.QtGui import QStyleOptionViewItemV4
+
+log = logging.getLogger(__name__)
+
+
+class AssetDelegate(QtWidgets.QItemDelegate):
+ bar_height = 3
+
+ def sizeHint(self, option, index):
+ result = super(AssetDelegate, self).sizeHint(option, index)
+ height = result.height()
+ result.setHeight(height + self.bar_height)
+
+ return result
+
+ def paint(self, painter, option, index):
+ # Qt4 compat
+ if Qt.__binding__ in ("PySide", "PyQt4"):
+ option = QStyleOptionViewItemV4(option)
+
+ painter.save()
+
+ item_rect = QtCore.QRect(option.rect)
+ item_rect.setHeight(option.rect.height() - self.bar_height)
+
+ subset_colors = index.data(AssetModel.subsetColorsRole)
+ subset_colors_width = 0
+ if subset_colors:
+ subset_colors_width = option.rect.width() / len(subset_colors)
+
+ subset_rects = []
+ counter = 0
+ for subset_c in subset_colors:
+ new_color = None
+ new_rect = None
+ if subset_c:
+ new_color = QtGui.QColor(*subset_c)
+
+ new_rect = QtCore.QRect(
+ option.rect.left() + (counter * subset_colors_width),
+ option.rect.top() + (
+ option.rect.height() - self.bar_height
+ ),
+ subset_colors_width,
+ self.bar_height
+ )
+ subset_rects.append((new_color, new_rect))
+ counter += 1
+
+ # Background
+ bg_color = QtGui.QColor(60, 60, 60)
+ if option.state & QtWidgets.QStyle.State_Selected:
+ if len(subset_colors) == 0:
+ item_rect.setTop(item_rect.top() + (self.bar_height / 2))
+ if option.state & QtWidgets.QStyle.State_MouseOver:
+ bg_color.setRgb(70, 70, 70)
+ else:
+ item_rect.setTop(item_rect.top() + (self.bar_height / 2))
+ if option.state & QtWidgets.QStyle.State_MouseOver:
+ bg_color.setAlpha(100)
+ else:
+ bg_color.setAlpha(0)
+
+ # When not needed to do a rounded corners (easier and without
+ # painter restore):
+ # painter.fillRect(
+ # item_rect,
+ # QtGui.QBrush(bg_color)
+ # )
+ pen = painter.pen()
+ pen.setStyle(QtCore.Qt.NoPen)
+ pen.setWidth(0)
+ painter.setPen(pen)
+ painter.setBrush(QtGui.QBrush(bg_color))
+ painter.drawRoundedRect(option.rect, 3, 3)
+
+ if option.state & QtWidgets.QStyle.State_Selected:
+ for color, subset_rect in subset_rects:
+ if not color or not subset_rect:
+ continue
+ painter.fillRect(subset_rect, QtGui.QBrush(color))
+
+ painter.restore()
+ painter.save()
+
+ # Icon
+ icon_index = index.model().index(
+ index.row(), index.column(), index.parent()
+ )
+ # - Default icon_rect if not icon
+ icon_rect = QtCore.QRect(
+ item_rect.left(),
+ item_rect.top(),
+            # To make sure it's the same size all the time
+ option.rect.height() - self.bar_height,
+ option.rect.height() - self.bar_height
+ )
+ icon = index.model().data(icon_index, QtCore.Qt.DecorationRole)
+
+ if icon:
+ mode = QtGui.QIcon.Normal
+ if not (option.state & QtWidgets.QStyle.State_Enabled):
+ mode = QtGui.QIcon.Disabled
+ elif option.state & QtWidgets.QStyle.State_Selected:
+ mode = QtGui.QIcon.Selected
+
+ if isinstance(icon, QtGui.QPixmap):
+ icon = QtGui.QIcon(icon)
+ option.decorationSize = icon.size() / icon.devicePixelRatio()
+
+ elif isinstance(icon, QtGui.QColor):
+ pixmap = QtGui.QPixmap(option.decorationSize)
+ pixmap.fill(icon)
+ icon = QtGui.QIcon(pixmap)
+
+ elif isinstance(icon, QtGui.QImage):
+ icon = QtGui.QIcon(QtGui.QPixmap.fromImage(icon))
+ option.decorationSize = icon.size() / icon.devicePixelRatio()
+
+ elif isinstance(icon, QtGui.QIcon):
+ state = QtGui.QIcon.Off
+ if option.state & QtWidgets.QStyle.State_Open:
+ state = QtGui.QIcon.On
+ actualSize = option.icon.actualSize(
+ option.decorationSize, mode, state
+ )
+ option.decorationSize = QtCore.QSize(
+ min(option.decorationSize.width(), actualSize.width()),
+ min(option.decorationSize.height(), actualSize.height())
+ )
+
+ state = QtGui.QIcon.Off
+ if option.state & QtWidgets.QStyle.State_Open:
+ state = QtGui.QIcon.On
+
+ icon.paint(
+ painter, icon_rect,
+ QtCore.Qt.AlignLeft, mode, state
+ )
+
+ # Text
+ text_rect = QtCore.QRect(
+ icon_rect.left() + icon_rect.width() + 2,
+ item_rect.top(),
+ item_rect.width(),
+ item_rect.height()
+ )
+
+ painter.drawText(
+ text_rect, QtCore.Qt.AlignVCenter,
+ index.data(QtCore.Qt.DisplayRole)
+ )
+
+ painter.restore()
+
+
+class VersionDelegate(QtWidgets.QStyledItemDelegate):
+    """A delegate that displays a version integer formatted as a version string."""
+
+ version_changed = QtCore.Signal()
+ first_run = False
+ lock = False
+
+ def __init__(self, dbcon, *args, **kwargs):
+ self.dbcon = dbcon
+ super(VersionDelegate, self).__init__(*args, **kwargs)
+
+ def displayText(self, value, locale):
+ if isinstance(value, HeroVersionType):
+ return lib.format_version(value, True)
+ assert isinstance(value, numbers.Integral), (
+ "Version is not integer. \"{}\" {}".format(value, str(type(value)))
+ )
+ return lib.format_version(value)
+
+ def paint(self, painter, option, index):
+ fg_color = index.data(QtCore.Qt.ForegroundRole)
+ if fg_color:
+ if isinstance(fg_color, QtGui.QBrush):
+ fg_color = fg_color.color()
+ elif isinstance(fg_color, QtGui.QColor):
+ pass
+ else:
+ fg_color = None
+
+ if not fg_color:
+ return super(VersionDelegate, self).paint(painter, option, index)
+
+ if option.widget:
+ style = option.widget.style()
+ else:
+ style = QtWidgets.QApplication.style()
+
+ style.drawControl(
+ style.CE_ItemViewItem, option, painter, option.widget
+ )
+
+ painter.save()
+
+ text = self.displayText(
+ index.data(QtCore.Qt.DisplayRole), option.locale
+ )
+ pen = painter.pen()
+ pen.setColor(fg_color)
+ painter.setPen(pen)
+
+ text_rect = style.subElementRect(style.SE_ItemViewItemText, option)
+ text_margin = style.proxy().pixelMetric(
+ style.PM_FocusFrameHMargin, option, option.widget
+ ) + 1
+
+ painter.drawText(
+ text_rect.adjusted(text_margin, 0, - text_margin, 0),
+ option.displayAlignment,
+ text
+ )
+
+ painter.restore()
+
+ def createEditor(self, parent, option, index):
+ item = index.data(TreeModel.ItemRole)
+ if item.get("isGroup") or item.get("isMerged"):
+ return
+
+ editor = QtWidgets.QComboBox(parent)
+
+ def commit_data():
+ if not self.first_run:
+ self.commitData.emit(editor) # Update model data
+ self.version_changed.emit() # Display model data
+ editor.currentIndexChanged.connect(commit_data)
+
+ self.first_run = True
+ self.lock = False
+
+ return editor
+
+ def setEditorData(self, editor, index):
+ if self.lock:
+ # Only set editor data once per delegation
+ return
+
+ editor.clear()
+
+ # Current value of the index
+ item = index.data(TreeModel.ItemRole)
+ value = index.data(QtCore.Qt.DisplayRole)
+ if item["version_document"]["type"] != "hero_version":
+ assert isinstance(value, numbers.Integral), (
+ "Version is not integer"
+ )
+
+ # Add all available versions to the editor
+ parent_id = item["version_document"]["parent"]
+ version_docs = list(self.dbcon.find(
+ {
+ "type": "version",
+ "parent": parent_id
+ },
+ sort=[("name", 1)]
+ ))
+
+ hero_version_doc = self.dbcon.find_one(
+ {
+ "type": "hero_version",
+ "parent": parent_id
+ }, {
+ "name": 1,
+ "data.tags": 1,
+ "version_id": 1
+ }
+ )
+
+ doc_for_hero_version = None
+
+ selected = None
+ items = []
+ for version_doc in version_docs:
+ version_tags = version_doc["data"].get("tags") or []
+ if "deleted" in version_tags:
+ continue
+
+ if (
+ hero_version_doc
+ and doc_for_hero_version is None
+ and hero_version_doc["version_id"] == version_doc["_id"]
+ ):
+ doc_for_hero_version = version_doc
+
+ label = lib.format_version(version_doc["name"])
+ item = QtGui.QStandardItem(label)
+ item.setData(version_doc, QtCore.Qt.UserRole)
+ items.append(item)
+
+ if version_doc["name"] == value:
+ selected = item
+
+ if hero_version_doc and doc_for_hero_version:
+ version_name = doc_for_hero_version["name"]
+ label = lib.format_version(version_name, True)
+ if isinstance(value, HeroVersionType):
+ index = len(version_docs)
+ hero_version_doc["name"] = HeroVersionType(version_name)
+
+ item = QtGui.QStandardItem(label)
+ item.setData(hero_version_doc, QtCore.Qt.UserRole)
+ items.append(item)
+
+        # Reverse items so the latest versions are at the top
+ items = list(reversed(items))
+ for item in items:
+ editor.model().appendRow(item)
+
+ index = 0
+ if selected:
+ index = selected.row()
+
+ # Will trigger index-change signal
+ editor.setCurrentIndex(index)
+ self.first_run = False
+ self.lock = True
+
+ def setModelData(self, editor, model, index):
+ """Apply the integer version back in the model"""
+ version = editor.itemData(editor.currentIndex())
+ model.setData(index, version["name"])
+
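+# Usage sketch (illustrative; the column index, parent view and slot name are
+# assumptions, not part of this change):
+#   delegate = VersionDelegate(io, parent=view)
+#   view.setItemDelegateForColumn(version_column, delegate)
+#   delegate.version_changed.connect(on_version_changed)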
+
+def pretty_date(t, now=None, strftime="%b %d %Y %H:%M"):
+ """Parse datetime to readable timestamp
+
+    Within the first ten seconds:
+        - "just now",
+    Within the first minute:
+        - "%S seconds ago"
+    Within one hour:
+        - "%M minutes ago".
+    Within one day:
+        - "%H:%M hours ago"
+    Else:
+        - formatted with `strftime` (default "%b %d %Y %H:%M")
+
+ """
+
+ assert isinstance(t, datetime)
+ if now is None:
+ now = datetime.now()
+ assert isinstance(now, datetime)
+ diff = now - t
+
+ second_diff = diff.seconds
+ day_diff = diff.days
+
+ # future (consider as just now)
+ if day_diff < 0:
+ return "just now"
+
+ # history
+ if day_diff == 0:
+ if second_diff < 10:
+ return "just now"
+ if second_diff < 60:
+ return str(second_diff) + " seconds ago"
+ if second_diff < 120:
+ return "a minute ago"
+ if second_diff < 3600:
+ return str(second_diff // 60) + " minutes ago"
+ if second_diff < 86400:
+ minutes = (second_diff % 3600) // 60
+ hours = second_diff // 3600
+ return "{0}:{1:02d} hours ago".format(hours, minutes)
+
+ return t.strftime(strftime)
+
+
+def pretty_timestamp(t, now=None):
+ """Parse timestamp to user readable format
+
+ >>> pretty_timestamp("20170614T151122Z", now="20170614T151123Z")
+ 'just now'
+
+ >>> pretty_timestamp("20170614T151122Z", now="20170614T171222Z")
+ '2:01 hours ago'
+
+ Args:
+ t (str): The time string to parse.
+ now (str, optional)
+
+ Returns:
+ str: human readable "recent" date.
+
+ """
+
+ if now is not None:
+ try:
+ now = time.strptime(now, "%Y%m%dT%H%M%SZ")
+ now = datetime.fromtimestamp(time.mktime(now))
+ except ValueError as e:
+            log.warning("Can't parse 'now' time format: {0} {1}".format(now, e))
+ return None
+
+ if isinstance(t, float):
+ dt = datetime.fromtimestamp(t)
+ else:
+ # Parse the time format as if it is `str` result from
+ # `pyblish.lib.time()` which usually is stored in Avalon database.
+ try:
+ t = time.strptime(t, "%Y%m%dT%H%M%SZ")
+ except ValueError as e:
+ log.warning("Can't parse time format: {0} {1}".format(t, e))
+ return None
+ dt = datetime.fromtimestamp(time.mktime(t))
+
+ # prettify
+ return pretty_date(dt, now=now)
+
+
+class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate):
+ """A delegate that displays a timestamp as a pretty date.
+
+ This displays dates like `pretty_date`.
+
+ """
+
+ def displayText(self, value, locale):
+
+ if value is None:
+ # Ignore None value
+ return
+
+ return pretty_timestamp(value)
diff --git a/openpype/tools/utils/lib.py b/openpype/tools/utils/lib.py
new file mode 100644
index 0000000000..d01dbbd169
--- /dev/null
+++ b/openpype/tools/utils/lib.py
@@ -0,0 +1,625 @@
+import os
+import sys
+import contextlib
+import collections
+
+from Qt import QtWidgets, QtCore, QtGui
+
+import avalon.api
+from avalon import style
+from avalon.vendor import qtawesome
+
+from openpype.api import get_project_settings
+from openpype.lib import filter_profiles
+
+
+def format_version(value, hero_version=False):
+ """Formats integer to displayable version name"""
+ label = "v{0:03d}".format(value)
+ if not hero_version:
+ return label
+ return "[{}]".format(label)
+
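+# Example (follows directly from the implementation above):
+#   format_version(7)        -> "v007"
+#   format_version(7, True)  -> "[v007]"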
+
+@contextlib.contextmanager
+def application():
+ app = QtWidgets.QApplication.instance()
+
+ if not app:
+ print("Starting new QApplication..")
+ app = QtWidgets.QApplication(sys.argv)
+ yield app
+ app.exec_()
+ else:
+ print("Using existing QApplication..")
+ yield app
+
+
+def defer(delay, func):
+ """Append artificial delay to `func`
+
+    This aids in keeping the GUI responsive, but complicates logic
+    when producing tests. To combat this, setting the PYBLISH_DELAY
+    environment variable to 0 makes every operation synchronous.
+
+    Arguments:
+        delay (float): Delay in milliseconds; multiplied by the
+            PYBLISH_DELAY environment variable (default 1, 0 means no delay)
+        func (callable): Any callable
+
+ """
+
+ delay *= float(os.getenv("PYBLISH_DELAY", 1))
+ if delay > 0:
+ return QtCore.QTimer.singleShot(delay, func)
+ else:
+ return func()
+
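+# Usage sketch (hypothetical): defer(100, dialog.close) closes the dialog
+# roughly 100 ms later; setting PYBLISH_DELAY=0 makes it run immediately.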
+
+class SharedObjects:
+ jobs = {}
+
+
+def schedule(func, time, channel="default"):
+ """Run `func` at a later `time` in a dedicated `channel`
+
+ Given an arbitrary function, call this function after a given
+ timeout. It will ensure that only one "job" is running within
+ the given channel at any one time and cancel any currently
+ running job if a new job is submitted before the timeout.
+
+ """
+
+ try:
+ SharedObjects.jobs[channel].stop()
+ except (AttributeError, KeyError, RuntimeError):
+ pass
+
+ timer = QtCore.QTimer()
+ timer.setSingleShot(True)
+ timer.timeout.connect(func)
+ timer.start(time)
+
+ SharedObjects.jobs[channel] = timer
+
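+# Usage sketch (hypothetical): debounce a refresh triggered by rapid edits.
+#   schedule(view.refresh, 200, channel="refresh")
+# Calling it again within 200 ms stops the previous timer, so only the last
+# request actually runs.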
+
+@contextlib.contextmanager
+def dummy():
+ """Dummy context manager
+
+ Usage:
+ >> with some_context() if False else dummy():
+ .. pass
+
+ """
+ yield
+
+
+def iter_model_rows(model, column, include_root=False):
+ """Iterate over all row indices in a model"""
+ indices = [QtCore.QModelIndex()] # start iteration at root
+
+ for index in indices:
+ # Add children to the iterations
+ child_rows = model.rowCount(index)
+ for child_row in range(child_rows):
+ child_index = model.index(child_row, column, index)
+ indices.append(child_index)
+
+ if not include_root and not index.isValid():
+ continue
+
+ yield index
+
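+# Usage sketch (hypothetical): walk every row of the first column.
+#   for index in iter_model_rows(model, column=0):
+#       print(index.data(QtCore.Qt.DisplayRole))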
+
+@contextlib.contextmanager
+def preserve_states(tree_view,
+ column=0,
+ role=None,
+ preserve_expanded=True,
+ preserve_selection=True,
+ expanded_role=QtCore.Qt.DisplayRole,
+ selection_role=QtCore.Qt.DisplayRole):
+    """Preserves expanded rows and row selection in a QTreeView.
+
+    This function is created to maintain the expand and selection status of
+    the model items. When refresh is triggered the items which are expanded
+    will stay expanded and vice versa.
+
+    Arguments:
+        tree_view (QtWidgets.QTreeView): the tree view nested in the application
+        column (int): the column to retrieve the data from
+        role (int): when set, overrides both `expanded_role` and
+            `selection_role`
+
+    Returns:
+        None
+    """
+ # When `role` is set then override both expanded and selection roles
+ if role:
+ expanded_role = role
+ selection_role = role
+
+ model = tree_view.model()
+ selection_model = tree_view.selectionModel()
+ flags = selection_model.Select | selection_model.Rows
+
+ expanded = set()
+
+ if preserve_expanded:
+ for index in iter_model_rows(
+ model, column=column, include_root=False
+ ):
+ if tree_view.isExpanded(index):
+ value = index.data(expanded_role)
+ expanded.add(value)
+
+ selected = None
+
+ if preserve_selection:
+ selected_rows = selection_model.selectedRows()
+ if selected_rows:
+ selected = set(row.data(selection_role) for row in selected_rows)
+
+ try:
+ yield
+ finally:
+ if expanded:
+ for index in iter_model_rows(
+ model, column=0, include_root=False
+ ):
+ value = index.data(expanded_role)
+ is_expanded = value in expanded
+ # skip if new index was created meanwhile
+ if is_expanded is None:
+ continue
+ tree_view.setExpanded(index, is_expanded)
+
+ if selected:
+ # Go through all indices, select the ones with similar data
+ for index in iter_model_rows(
+ model, column=column, include_root=False
+ ):
+ value = index.data(selection_role)
+ state = value in selected
+ if state:
+ tree_view.scrollTo(index) # Ensure item is visible
+ selection_model.select(index, flags)
+
+
+@contextlib.contextmanager
+def preserve_expanded_rows(tree_view, column=0, role=None):
+ """Preserves expanded row in QTreeView by column's data role.
+
+ This function is created to maintain the expand vs collapse status of
+ the model items. When refresh is triggered the items which are expanded
+    will stay expanded and vice versa.
+
+    Arguments:
+        tree_view (QtWidgets.QTreeView): the tree view which is
+            nested in the application
+ column (int): the column to retrieve the data from
+ role (int): the role which dictates what will be returned
+
+ Returns:
+ None
+
+ """
+ if role is None:
+ role = QtCore.Qt.DisplayRole
+ model = tree_view.model()
+
+ expanded = set()
+
+ for index in iter_model_rows(model, column=column, include_root=False):
+ if tree_view.isExpanded(index):
+ value = index.data(role)
+ expanded.add(value)
+
+ try:
+ yield
+ finally:
+ if not expanded:
+ return
+
+ for index in iter_model_rows(model, column=column, include_root=False):
+ value = index.data(role)
+ state = value in expanded
+ if state:
+ tree_view.expand(index)
+ else:
+ tree_view.collapse(index)
+
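+# Usage sketch (hypothetical): keep expansion state across a model refresh.
+#   with preserve_expanded_rows(tree_view, role=QtCore.Qt.DisplayRole):
+#       model.refresh()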
+
+@contextlib.contextmanager
+def preserve_selection(tree_view, column=0, role=None, current_index=True):
+ """Preserves row selection in QTreeView by column's data role.
+
+    This function is created to maintain the selection status of
+    the model items. When refresh is triggered the items which were selected
+    will stay selected.
+
+    Arguments:
+        tree_view (QtWidgets.QTreeView): the tree view nested in the application
+        column (int): the column to retrieve the data from
+        role (int): the role which dictates what will be returned
+
+ Returns:
+ None
+
+ """
+ if role is None:
+ role = QtCore.Qt.DisplayRole
+ model = tree_view.model()
+ selection_model = tree_view.selectionModel()
+ flags = selection_model.Select | selection_model.Rows
+
+ if current_index:
+ current_index_value = tree_view.currentIndex().data(role)
+ else:
+ current_index_value = None
+
+ selected_rows = selection_model.selectedRows()
+ if not selected_rows:
+ yield
+ return
+
+ selected = set(row.data(role) for row in selected_rows)
+ try:
+ yield
+ finally:
+ if not selected:
+ return
+
+ # Go through all indices, select the ones with similar data
+ for index in iter_model_rows(model, column=column, include_root=False):
+ value = index.data(role)
+ state = value in selected
+ if state:
+ tree_view.scrollTo(index) # Ensure item is visible
+ selection_model.select(index, flags)
+
+ if current_index_value and value == current_index_value:
+ selection_model.setCurrentIndex(
+ index, selection_model.NoUpdate
+ )
+
+
+class FamilyConfigCache:
+ default_color = "#0091B2"
+ _default_icon = None
+
+ def __init__(self, dbcon):
+ self.dbcon = dbcon
+ self.family_configs = {}
+ self._family_filters_set = False
+ self._require_refresh = True
+
+ @classmethod
+ def default_icon(cls):
+ if cls._default_icon is None:
+ cls._default_icon = qtawesome.icon(
+ "fa.folder", color=cls.default_color
+ )
+ return cls._default_icon
+
+ def family_config(self, family_name):
+ """Get value from config with fallback to default"""
+ if self._require_refresh:
+ self._refresh()
+
+ item = self.family_configs.get(family_name)
+ if not item:
+ item = {
+ "icon": self.default_icon()
+ }
+ if self._family_filters_set:
+ item["state"] = False
+ return item
+
+ def refresh(self, force=False):
+ self._require_refresh = True
+
+ if force:
+ self._refresh()
+
+ def _refresh(self):
+ """Get the family configurations from the database
+
+ The configuration must be stored on the project under `config`.
+ For example:
+
+ {"config": {
+ "families": [
+            {"name": "avalon.camera", "label": "Camera", "icon": "photo"},
+            {"name": "avalon.anim", "label": "Animation", "icon": "male"},
+ ]
+ }}
+
+ It is possible to override the default behavior and set specific
+ families checked. For example we only want the families imagesequence
+ and camera to be visible in the Loader.
+ """
+ self._require_refresh = False
+ self._family_filters_set = False
+
+ self.family_configs.clear()
+ # Skip if we're not in host context
+ if not avalon.api.registered_host():
+ return
+
+ # Update the icons from the project configuration
+ project_name = os.environ.get("AVALON_PROJECT")
+ asset_name = os.environ.get("AVALON_ASSET")
+ task_name = os.environ.get("AVALON_TASK")
+ if not all((project_name, asset_name, task_name)):
+ return
+
+ matching_item = None
+ project_settings = get_project_settings(project_name)
+ profiles = (
+ project_settings
+ ["global"]
+ ["tools"]
+ ["loader"]
+ ["family_filter_profiles"]
+ )
+ if profiles:
+ asset_doc = self.dbcon.find_one(
+ {"type": "asset", "name": asset_name},
+ {"data.tasks": True}
+ )
+ tasks_info = asset_doc.get("data", {}).get("tasks") or {}
+ task_type = tasks_info.get(task_name, {}).get("type")
+ profiles_filter = {
+ "task_types": task_type,
+ "hosts": os.environ["AVALON_APP"]
+ }
+ matching_item = filter_profiles(profiles, profiles_filter)
+
+ families = []
+ if matching_item:
+ families = matching_item["filter_families"]
+
+ if not families:
+ return
+
+ self._family_filters_set = True
+
+ # Replace icons with a Qt icon we can use in the user interfaces
+ for family in families:
+ family_info = {
+ "name": family,
+ "icon": self.default_icon(),
+ "state": True
+ }
+
+ self.family_configs[family] = family_info
+
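+# Usage sketch (hypothetical; "render" is an arbitrary example family):
+#   cache = FamilyConfigCache(io)
+#   cache.refresh(force=True)
+#   config = cache.family_config("render")  # {"icon": ...} plus "state"
+#                                           # when family filters are set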
+
+class GroupsConfig:
+ # Subset group item's default icon and order
+ _default_group_config = None
+
+ def __init__(self, dbcon):
+ self.dbcon = dbcon
+ self.groups = {}
+
+ @classmethod
+ def default_group_config(cls):
+ if cls._default_group_config is None:
+ cls._default_group_config = {
+ "icon": qtawesome.icon(
+ "fa.object-group",
+ color=style.colors.default
+ ),
+ "order": 0
+ }
+ return cls._default_group_config
+
+ def refresh(self):
+ """Get subset group configurations from the database
+
+ The 'group' configuration must be stored in the project `config` field.
+ See schema `config-1.0.json`
+
+ """
+ # Clear cached groups
+ self.groups.clear()
+
+ group_configs = []
+ project_name = self.dbcon.Session.get("AVALON_PROJECT")
+ if project_name:
+            # Get pre-defined group name and appearance from project config
+ project_doc = self.dbcon.find_one(
+ {"type": "project"},
+ projection={"config.groups": True}
+ )
+
+ if project_doc:
+ group_configs = project_doc["config"].get("groups") or []
+ else:
+ print("Project not found! \"{}\"".format(project_name))
+
+ # Build pre-defined group configs
+ for config in group_configs:
+ name = config["name"]
+ icon = "fa." + config.get("icon", "object-group")
+ color = config.get("color", style.colors.default)
+ order = float(config.get("order", 0))
+
+ self.groups[name] = {
+ "icon": qtawesome.icon(icon, color=color),
+ "order": order
+ }
+
+ return self.groups
+
+ def ordered_groups(self, group_names):
+ # default order zero included
+ _orders = set([0])
+ for config in self.groups.values():
+ _orders.add(config["order"])
+
+ # Remap order to list index
+ orders = sorted(_orders)
+
+ _groups = list()
+ for name in group_names:
+ # Get group config
+ config = self.groups.get(name) or self.default_group_config()
+ # Base order
+ remapped_order = orders.index(config["order"])
+
+ data = {
+ "name": name,
+ "icon": config["icon"],
+ "_order": remapped_order,
+ }
+
+ _groups.append(data)
+
+ # Sort by tuple (base_order, name)
+        # If there are multiple groups with the same order, they are sorted by name.
+ ordered_groups = sorted(
+ _groups, key=lambda _group: (_group.pop("_order"), _group["name"])
+ )
+
+ total = len(ordered_groups)
+ order_temp = "%0{}d".format(len(str(total)))
+
+ # Update sorted order to config
+ for index, group_data in enumerate(ordered_groups):
+ order = index
+ inverse_order = total - index
+
+ # Format orders into fixed length string for groups sorting
+ group_data["order"] = order_temp % order
+ group_data["inverseOrder"] = order_temp % inverse_order
+
+ return ordered_groups
+
+ def active_groups(self, asset_ids, include_predefined=True):
+ """Collect all active groups from each subset"""
+ # Collect groups from subsets
+ group_names = set(
+ self.dbcon.distinct(
+ "data.subsetGroup",
+ {"type": "subset", "parent": {"$in": asset_ids}}
+ )
+ )
+ if include_predefined:
+ # Ensure all predefined group configs will be included
+ group_names.update(self.groups.keys())
+
+ return self.ordered_groups(group_names)
+
+ def split_subsets_for_groups(self, subset_docs, grouping):
+ """Collect all active groups from each subset"""
+ subset_docs_without_group = collections.defaultdict(list)
+ subset_docs_by_group = collections.defaultdict(dict)
+ for subset_doc in subset_docs:
+ subset_name = subset_doc["name"]
+ if grouping:
+ group_name = subset_doc["data"].get("subsetGroup")
+ if group_name:
+ if subset_name not in subset_docs_by_group[group_name]:
+ subset_docs_by_group[group_name][subset_name] = []
+
+ subset_docs_by_group[group_name][subset_name].append(
+ subset_doc
+ )
+ continue
+
+ subset_docs_without_group[subset_name].append(subset_doc)
+
+ ordered_groups = self.ordered_groups(subset_docs_by_group.keys())
+
+ return ordered_groups, subset_docs_without_group, subset_docs_by_group
+
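+# Usage sketch (hypothetical; asset ids and returned group names are
+# examples only):
+#   groups_config = GroupsConfig(io)
+#   groups_config.refresh()
+#   groups = groups_config.active_groups([asset_id])
+#   # -> [{"name": ..., "icon": ..., "order": "0", "inverseOrder": "2"}, ...]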
+
+def create_qthread(func, *args, **kwargs):
+ class Thread(QtCore.QThread):
+ def run(self):
+ func(*args, **kwargs)
+ return Thread()
+
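+# Usage sketch (hypothetical): run a blocking fetch off the main thread.
+#   thread = create_qthread(model.fetch)
+#   thread.start()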
+
+def get_repre_icons():
+ try:
+ from openpype_modules import sync_server
+ except Exception:
+ # Backwards compatibility
+ from openpype.modules import sync_server
+
+ resource_path = os.path.join(
+ os.path.dirname(sync_server.sync_server_module.__file__),
+ "providers", "resources"
+ )
+ icons = {}
+ # TODO get from sync module
+ for provider in ['studio', 'local_drive', 'gdrive']:
+ pix_url = "{}/{}.png".format(resource_path, provider)
+ icons[provider] = QtGui.QIcon(pix_url)
+
+ return icons
+
+
+def get_progress_for_repre(doc, active_site, remote_site):
+ """
+ Calculates average progress for representation.
+
+    If a site has `created_dt`, the file is fully available there and its
+    progress counts as 1.
+
+    Could be calculated in an aggregation if this turns out to be too slow.
+
+    Args:
+        doc(dict): representation dict
+        active_site(str): name of the active (local) site
+        remote_site(str): name of the remote site
+ Returns:
+ (dict) with active and remote sites progress
+ {'studio': 1.0, 'gdrive': -1} - gdrive site is not present
+ -1 is used to highlight the site should be added
+ {'studio': 1.0, 'gdrive': 0.0} - gdrive site is present, not
+ uploaded yet
+ """
+ progress = {active_site: -1,
+ remote_site: -1}
+ if not doc:
+ return progress
+
+ files = {active_site: 0, remote_site: 0}
+ doc_files = doc.get("files") or []
+ for doc_file in doc_files:
+ if not isinstance(doc_file, dict):
+ continue
+
+ sites = doc_file.get("sites") or []
+ for site in sites:
+ if (
+ # Pype 2 compatibility
+ not isinstance(site, dict)
+ # Check if site name is one of progress sites
+ or site["name"] not in progress
+ ):
+ continue
+
+ files[site["name"]] += 1
+ norm_progress = max(progress[site["name"]], 0)
+ if site.get("created_dt"):
+ progress[site["name"]] = norm_progress + 1
+ elif site.get("progress"):
+ progress[site["name"]] = norm_progress + site["progress"]
+ else: # site exists, might be failed, do not add again
+ progress[site["name"]] = 0
+
+ # for example 13 fully avail. files out of 26 >> 13/26 = 0.5
+ avg_progress = {}
+ avg_progress[active_site] = \
+ progress[active_site] / max(files[active_site], 1)
+ avg_progress[remote_site] = \
+ progress[remote_site] / max(files[remote_site], 1)
+ return avg_progress
+
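+# Worked example (illustrative data only): both files are on "studio",
+# one is fully on "gdrive" and the other is half uploaded.
+#   doc = {"files": [
+#       {"sites": [{"name": "studio", "created_dt": 1},
+#                  {"name": "gdrive", "created_dt": 1}]},
+#       {"sites": [{"name": "studio", "created_dt": 1},
+#                  {"name": "gdrive", "progress": 0.5}]},
+#   ]}
+#   get_progress_for_repre(doc, "studio", "gdrive")
+#   # -> {"studio": 1.0, "gdrive": 0.75}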
+
+def is_sync_loader(loader):
+ return is_remove_site_loader(loader) or is_add_site_loader(loader)
+
+
+def is_remove_site_loader(loader):
+ return hasattr(loader, "remove_site_on_representation")
+
+
+def is_add_site_loader(loader):
+ return hasattr(loader, "add_site_to_representation")
diff --git a/openpype/tools/utils/models.py b/openpype/tools/utils/models.py
new file mode 100644
index 0000000000..c5e1ce1b12
--- /dev/null
+++ b/openpype/tools/utils/models.py
@@ -0,0 +1,500 @@
+import re
+import time
+import logging
+import collections
+
+import Qt
+from Qt import QtCore, QtGui
+from avalon.vendor import qtawesome
+from avalon import style, io
+from . import lib
+
+log = logging.getLogger(__name__)
+
+
+class TreeModel(QtCore.QAbstractItemModel):
+
+ Columns = list()
+ ItemRole = QtCore.Qt.UserRole + 1
+ item_class = None
+
+ def __init__(self, parent=None):
+ super(TreeModel, self).__init__(parent)
+ self._root_item = self.ItemClass()
+
+ @property
+ def ItemClass(self):
+ if self.item_class is not None:
+ return self.item_class
+ return Item
+
+ def rowCount(self, parent=None):
+ if parent is None or not parent.isValid():
+ parent_item = self._root_item
+ else:
+ parent_item = parent.internalPointer()
+ return parent_item.childCount()
+
+ def columnCount(self, parent):
+ return len(self.Columns)
+
+ def data(self, index, role):
+ if not index.isValid():
+ return None
+
+ if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
+ item = index.internalPointer()
+ column = index.column()
+
+ key = self.Columns[column]
+ return item.get(key, None)
+
+ if role == self.ItemRole:
+ return index.internalPointer()
+
+ def setData(self, index, value, role=QtCore.Qt.EditRole):
+ """Change the data on the items.
+
+ Returns:
+ bool: Whether the edit was successful
+ """
+
+ if index.isValid():
+ if role == QtCore.Qt.EditRole:
+
+ item = index.internalPointer()
+ column = index.column()
+ key = self.Columns[column]
+ item[key] = value
+
+ # passing `list()` for PyQt5 (see PYSIDE-462)
+ if Qt.__binding__ in ("PyQt4", "PySide"):
+ self.dataChanged.emit(index, index)
+ else:
+ self.dataChanged.emit(index, index, [role])
+
+ # must return true if successful
+ return True
+
+ return False
+
+ def setColumns(self, keys):
+ assert isinstance(keys, (list, tuple))
+ self.Columns = keys
+
+ def headerData(self, section, orientation, role):
+
+ if role == QtCore.Qt.DisplayRole:
+ if section < len(self.Columns):
+ return self.Columns[section]
+
+        return super(TreeModel, self).headerData(
+            section, orientation, role
+        )
+
+ def flags(self, index):
+ flags = QtCore.Qt.ItemIsEnabled
+
+ item = index.internalPointer()
+ if item.get("enabled", True):
+ flags |= QtCore.Qt.ItemIsSelectable
+
+ return flags
+
+ def parent(self, index):
+
+ item = index.internalPointer()
+ parent_item = item.parent()
+
+ # If it has no parents we return invalid
+ if parent_item == self._root_item or not parent_item:
+ return QtCore.QModelIndex()
+
+ return self.createIndex(parent_item.row(), 0, parent_item)
+
+ def index(self, row, column, parent=None):
+ """Return index for row/column under parent"""
+
+ if parent is None or not parent.isValid():
+ parent_item = self._root_item
+ else:
+ parent_item = parent.internalPointer()
+
+ child_item = parent_item.child(row)
+ if child_item:
+ return self.createIndex(row, column, child_item)
+ else:
+ return QtCore.QModelIndex()
+
+ def add_child(self, item, parent=None):
+ if parent is None:
+ parent = self._root_item
+
+ parent.add_child(item)
+
+ def column_name(self, column):
+ """Return column key by index"""
+
+ if column < len(self.Columns):
+ return self.Columns[column]
+
+ def clear(self):
+ self.beginResetModel()
+ self._root_item = self.ItemClass()
+ self.endResetModel()
+
+
+class Item(dict):
+ """An item that can be represented in a tree view using `TreeModel`.
+
+ The item can store data just like a regular dictionary.
+
+ >>> data = {"name": "John", "score": 10}
+ >>> item = Item(data)
+ >>> assert item["name"] == "John"
+
+ """
+
+ def __init__(self, data=None):
+ super(Item, self).__init__()
+
+ self._children = list()
+ self._parent = None
+
+ if data is not None:
+ assert isinstance(data, dict)
+ self.update(data)
+
+ def childCount(self):
+ return len(self._children)
+
+ def child(self, row):
+
+ if row >= len(self._children):
+ log.warning("Invalid row as child: {0}".format(row))
+ return
+
+ return self._children[row]
+
+ def children(self):
+ return self._children
+
+ def parent(self):
+ return self._parent
+
+ def row(self):
+ """
+ Returns:
+ int: Index of this item under parent"""
+ if self._parent is not None:
+ siblings = self.parent().children()
+ return siblings.index(self)
+ return -1
+
+ def add_child(self, child):
+ """Add a child to this item"""
+ child._parent = self
+ self._children.append(child)
+
+
+class AssetModel(TreeModel):
+ """A model listing assets in the silo in the active project.
+
+ The assets are displayed in a treeview, they are visually parented by
+ a `visualParent` field in the database containing an `_id` to a parent
+ asset.
+
+ """
+
+ Columns = ["label"]
+ Name = 0
+ Deprecated = 2
+ ObjectId = 3
+
+ DocumentRole = QtCore.Qt.UserRole + 2
+ ObjectIdRole = QtCore.Qt.UserRole + 3
+ subsetColorsRole = QtCore.Qt.UserRole + 4
+
+ doc_fetched = QtCore.Signal(bool)
+ refreshed = QtCore.Signal(bool)
+
+ # Asset document projection
+ asset_projection = {
+ "type": 1,
+ "schema": 1,
+ "name": 1,
+ "silo": 1,
+ "data.visualParent": 1,
+ "data.label": 1,
+ "data.tags": 1,
+ "data.icon": 1,
+ "data.color": 1,
+ "data.deprecated": 1
+ }
+
+ def __init__(self, dbcon=None, parent=None, asset_projection=None):
+ super(AssetModel, self).__init__(parent=parent)
+ if dbcon is None:
+ dbcon = io
+ self.dbcon = dbcon
+ self.asset_colors = {}
+
+ # Projections for Mongo queries
+        # - allow tools that require more than the defaults to override them
+        if asset_projection:
+            self.asset_projection = asset_projection
+
+ self._doc_fetching_thread = None
+ self._doc_fetching_stop = False
+ self._doc_payload = {}
+
+ self.doc_fetched.connect(self.on_doc_fetched)
+
+ self.refresh()
+
+ def _add_hierarchy(self, assets, parent=None, silos=None):
+ """Add the assets that are related to the parent as children items.
+
+ This method does *not* query the database. These instead are queried
+ in a single batch upfront as an optimization to reduce database
+ queries. Resulting in up to 10x speed increase.
+
+ Args:
+ assets (dict): All assets in the currently active silo stored
+ by key/value
+
+ Returns:
+ None
+
+ """
+ # Reset colors
+ self.asset_colors = {}
+
+ if silos:
+ # WARNING: Silo item "_id" is set to silo value
+            # mainly because of a GUI issue with preserving selection and
+            # expanded rows
+ # and because of easier hierarchy parenting (in "assets")
+ for silo in silos:
+ item = Item({
+ "_id": silo,
+ "name": silo,
+ "label": silo,
+ "type": "silo"
+ })
+ self.add_child(item, parent=parent)
+ self._add_hierarchy(assets, parent=item)
+
+ parent_id = parent["_id"] if parent else None
+ current_assets = assets.get(parent_id, list())
+
+ for asset in current_assets:
+ # get label from data, otherwise use name
+ data = asset.get("data", {})
+ label = data.get("label", asset["name"])
+ tags = data.get("tags", [])
+
+ # store for the asset for optimization
+ deprecated = "deprecated" in tags
+
+ item = Item({
+ "_id": asset["_id"],
+ "name": asset["name"],
+ "label": label,
+ "type": asset["type"],
+ "tags": ", ".join(tags),
+ "deprecated": deprecated,
+ "_document": asset
+ })
+ self.add_child(item, parent=parent)
+
+ # Add asset's children recursively if it has children
+ if asset["_id"] in assets:
+ self._add_hierarchy(assets, parent=item)
+
+ self.asset_colors[asset["_id"]] = []
+
+ def on_doc_fetched(self, was_stopped):
+ if was_stopped:
+ self.stop_fetch_thread()
+ return
+
+ self.beginResetModel()
+
+ assets_by_parent = self._doc_payload.get("assets_by_parent")
+ silos = self._doc_payload.get("silos")
+ if assets_by_parent is not None:
+ # Build the hierarchical tree items recursively
+ self._add_hierarchy(
+ assets_by_parent,
+ parent=None,
+ silos=silos
+ )
+
+ self.endResetModel()
+
+ has_content = bool(assets_by_parent) or bool(silos)
+ self.refreshed.emit(has_content)
+
+ self.stop_fetch_thread()
+
+ def fetch(self):
+ self._doc_payload = self._fetch() or {}
+ # Emit doc fetched only if was not stopped
+ self.doc_fetched.emit(self._doc_fetching_stop)
+
+ def _fetch(self):
+ if not self.dbcon.Session.get("AVALON_PROJECT"):
+ return
+
+ project_doc = self.dbcon.find_one(
+ {"type": "project"},
+ {"_id": True}
+ )
+ if not project_doc:
+ return
+
+ # Get all assets sorted by name
+ db_assets = self.dbcon.find(
+ {"type": "asset"},
+ self.asset_projection
+ ).sort("name", 1)
+
+ # Group the assets by their visual parent's id
+ assets_by_parent = collections.defaultdict(list)
+ for asset in db_assets:
+ if self._doc_fetching_stop:
+ return
+ parent_id = asset.get("data", {}).get("visualParent")
+ assets_by_parent[parent_id].append(asset)
+
+ return {
+ "assets_by_parent": assets_by_parent,
+ "silos": None
+ }
+
+ def stop_fetch_thread(self):
+ if self._doc_fetching_thread is not None:
+ self._doc_fetching_stop = True
+ while self._doc_fetching_thread.isRunning():
+ time.sleep(0.001)
+ self._doc_fetching_thread = None
+
+ def refresh(self, force=False):
+ """Refresh the data for the model."""
+ # Skip fetch if there is already other thread fetching documents
+ if self._doc_fetching_thread is not None:
+ if not force:
+ return
+ self.stop_fetch_thread()
+
+ # Clear model items
+ self.clear()
+
+ # Fetch documents from mongo
+ # Restart payload
+ self._doc_payload = {}
+ self._doc_fetching_stop = False
+ self._doc_fetching_thread = lib.create_qthread(self.fetch)
+ self._doc_fetching_thread.start()
+
+ def flags(self, index):
+ return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
+
+ def setData(self, index, value, role=QtCore.Qt.EditRole):
+ if not index.isValid():
+ return False
+
+ if role == self.subsetColorsRole:
+ asset_id = index.data(self.ObjectIdRole)
+ self.asset_colors[asset_id] = value
+
+ if Qt.__binding__ in ("PyQt4", "PySide"):
+ self.dataChanged.emit(index, index)
+ else:
+ self.dataChanged.emit(index, index, [role])
+
+ return True
+
+ return super(AssetModel, self).setData(index, value, role)
+
+ def data(self, index, role):
+ if not index.isValid():
+ return
+
+ item = index.internalPointer()
+ if role == QtCore.Qt.DecorationRole:
+ column = index.column()
+ if column == self.Name:
+ # Allow a custom icon and custom icon color to be defined
+ data = item.get("_document", {}).get("data", {})
+ icon = data.get("icon", None)
+ if icon is None and item.get("type") == "silo":
+ icon = "database"
+ color = data.get("color", style.colors.default)
+
+ if icon is None:
+ # Use default icons if no custom one is specified.
+ # If it has children show a full folder, otherwise
+ # show an open folder
+ has_children = self.rowCount(index) > 0
+ icon = "folder" if has_children else "folder-o"
+
+ # Make the color darker when the asset is deprecated
+ if item.get("deprecated", False):
+ color = QtGui.QColor(color).darker(250)
+
+ try:
+ key = "fa.{0}".format(icon) # font-awesome key
+ icon = qtawesome.icon(key, color=color)
+ return icon
+ except Exception as exception:
+ # Log an error message instead of erroring out completely
+ # when the icon couldn't be created (e.g. invalid name)
+ log.error(exception)
+
+ return
+
+ if role == QtCore.Qt.ForegroundRole: # font color
+ if "deprecated" in item.get("tags", []):
+ return QtGui.QColor(style.colors.light).darker(250)
+
+ if role == self.ObjectIdRole:
+ return item.get("_id", None)
+
+ if role == self.DocumentRole:
+ return item.get("_document", None)
+
+ if role == self.subsetColorsRole:
+ asset_id = item.get("_id", None)
+ return self.asset_colors.get(asset_id) or []
+
+ return super(AssetModel, self).data(index, role)
+
+
+class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel):
+    """Proxy model that keeps parent rows when any child matches the filter regex."""
+ def filterAcceptsRow(self, row, parent):
+ regex = self.filterRegExp()
+ if not regex.isEmpty():
+ pattern = regex.pattern()
+ model = self.sourceModel()
+ source_index = model.index(row, self.filterKeyColumn(), parent)
+ if source_index.isValid():
+ # Check current index itself
+ key = model.data(source_index, self.filterRole())
+ if re.search(pattern, key, re.IGNORECASE):
+ return True
+
+ # Check children
+ rows = model.rowCount(source_index)
+ for i in range(rows):
+ if self.filterAcceptsRow(i, source_index):
+ return True
+
+ # Otherwise filter it
+ return False
+
+ return super(
+ RecursiveSortFilterProxyModel, self
+ ).filterAcceptsRow(row, parent)
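+# Usage sketch (mirrors how AssetWidget in widgets.py wires it up):
+#   proxy = RecursiveSortFilterProxyModel()
+#   proxy.setSourceModel(asset_model)
+#   proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
+#   filter_edit.textChanged.connect(proxy.setFilterFixedString)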
diff --git a/openpype/tools/utils/views.py b/openpype/tools/utils/views.py
new file mode 100644
index 0000000000..bed5655647
--- /dev/null
+++ b/openpype/tools/utils/views.py
@@ -0,0 +1,86 @@
+import os
+from avalon import style
+from Qt import QtWidgets, QtCore, QtGui, QtSvg
+
+
+class DeselectableTreeView(QtWidgets.QTreeView):
+ """A tree view that deselects on clicking on an empty area in the view"""
+
+ def mousePressEvent(self, event):
+
+ index = self.indexAt(event.pos())
+ if not index.isValid():
+ # clear the selection
+ self.clearSelection()
+ # clear the current index
+ self.setCurrentIndex(QtCore.QModelIndex())
+
+ QtWidgets.QTreeView.mousePressEvent(self, event)
+
+
+class TreeViewSpinner(QtWidgets.QTreeView):
+ size = 160
+
+ def __init__(self, parent=None):
+ super(TreeViewSpinner, self).__init__(parent=parent)
+
+ loading_image_path = os.path.join(
+ os.path.dirname(os.path.abspath(style.__file__)),
+ "svg",
+ "spinner-200.svg"
+ )
+ self.spinner = QtSvg.QSvgRenderer(loading_image_path)
+
+ self.is_loading = False
+ self.is_empty = True
+
+ def paint_loading(self, event):
+ rect = event.rect()
+ rect = QtCore.QRectF(rect.topLeft(), rect.bottomRight())
+ rect.moveTo(
+ rect.x() + rect.width() / 2 - self.size / 2,
+ rect.y() + rect.height() / 2 - self.size / 2
+ )
+ rect.setSize(QtCore.QSizeF(self.size, self.size))
+ painter = QtGui.QPainter(self.viewport())
+ self.spinner.render(painter, rect)
+
+ def paint_empty(self, event):
+ painter = QtGui.QPainter(self.viewport())
+ rect = event.rect()
+ rect = QtCore.QRectF(rect.topLeft(), rect.bottomRight())
+ qtext_opt = QtGui.QTextOption(
+ QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
+ )
+ painter.drawText(rect, "No Data", qtext_opt)
+
+ def paintEvent(self, event):
+ if self.is_loading:
+ self.paint_loading(event)
+ elif self.is_empty:
+ self.paint_empty(event)
+ else:
+ super(TreeViewSpinner, self).paintEvent(event)
+
+
+class AssetsView(TreeViewSpinner, DeselectableTreeView):
+ """Item view.
+ This implements a context menu.
+ """
+
+ def __init__(self):
+ super(AssetsView, self).__init__()
+ self.setIndentation(15)
+ self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
+ self.setHeaderHidden(True)
+
+ def mousePressEvent(self, event):
+ index = self.indexAt(event.pos())
+ if not index.isValid():
+ modifiers = QtWidgets.QApplication.keyboardModifiers()
+ if modifiers == QtCore.Qt.ShiftModifier:
+ return
+ elif modifiers == QtCore.Qt.ControlModifier:
+ return
+
+ super(AssetsView, self).mousePressEvent(event)
diff --git a/openpype/tools/utils/widgets.py b/openpype/tools/utils/widgets.py
new file mode 100644
index 0000000000..b9b542c123
--- /dev/null
+++ b/openpype/tools/utils/widgets.py
@@ -0,0 +1,499 @@
+import logging
+import time
+
+from . import lib
+
+from Qt import QtWidgets, QtCore, QtGui
+from avalon.vendor import qtawesome, qargparse
+
+from avalon import style
+
+from .models import AssetModel, RecursiveSortFilterProxyModel
+from .views import AssetsView
+from .delegates import AssetDelegate
+
+log = logging.getLogger(__name__)
+
+
+class AssetWidget(QtWidgets.QWidget):
+ """A Widget to display a tree of assets with filter
+
+ To list the assets of the active project:
+ >>> # widget = AssetWidget()
+ >>> # widget.refresh()
+ >>> # widget.show()
+
+ """
+
+ refresh_triggered = QtCore.Signal() # on model refresh
+ refreshed = QtCore.Signal()
+ selection_changed = QtCore.Signal() # on view selection change
+ current_changed = QtCore.Signal() # on view current index change
+
+ def __init__(self, dbcon, multiselection=False, parent=None):
+ super(AssetWidget, self).__init__(parent=parent)
+
+ self.dbcon = dbcon
+
+ self.setContentsMargins(0, 0, 0, 0)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.setSpacing(4)
+
+ # Tree View
+ model = AssetModel(dbcon=self.dbcon, parent=self)
+ proxy = RecursiveSortFilterProxyModel()
+ proxy.setSourceModel(model)
+ proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
+
+ view = AssetsView()
+ view.setModel(proxy)
+ if multiselection:
+ asset_delegate = AssetDelegate()
+ view.setSelectionMode(view.ExtendedSelection)
+ view.setItemDelegate(asset_delegate)
+
+ # Header
+ header = QtWidgets.QHBoxLayout()
+
+ icon = qtawesome.icon("fa.arrow-down", color=style.colors.light)
+ set_current_asset_btn = QtWidgets.QPushButton(icon, "")
+ set_current_asset_btn.setToolTip("Go to Asset from current Session")
+ # Hide by default
+ set_current_asset_btn.setVisible(False)
+
+ icon = qtawesome.icon("fa.refresh", color=style.colors.light)
+ refresh = QtWidgets.QPushButton(icon, "")
+ refresh.setToolTip("Refresh items")
+
+ filter = QtWidgets.QLineEdit()
+ filter.textChanged.connect(proxy.setFilterFixedString)
+ filter.setPlaceholderText("Filter assets..")
+
+ header.addWidget(filter)
+ header.addWidget(set_current_asset_btn)
+ header.addWidget(refresh)
+
+ # Layout
+ layout.addLayout(header)
+ layout.addWidget(view)
+
+ # Signals/Slots
+ selection = view.selectionModel()
+ selection.selectionChanged.connect(self.selection_changed)
+ selection.currentChanged.connect(self.current_changed)
+ refresh.clicked.connect(self.refresh)
+ set_current_asset_btn.clicked.connect(self.set_current_session_asset)
+
+ self.set_current_asset_btn = set_current_asset_btn
+ self.model = model
+ self.proxy = proxy
+ self.view = view
+
+ self.model_selection = {}
+
+ def set_current_asset_btn_visibility(self, visible=None):
+        """Set visibility of the 'set current asset' button.
+
+        Not all tools support using the current context asset.
+        """
+ if visible is None:
+ visible = not self.set_current_asset_btn.isVisible()
+ self.set_current_asset_btn.setVisible(visible)
+
+ def _refresh_model(self):
+ # Store selection
+ self._store_model_selection()
+ time_start = time.time()
+
+ self.set_loading_state(
+ loading=True,
+ empty=True
+ )
+
+ def on_refreshed(has_item):
+ self.set_loading_state(loading=False, empty=not has_item)
+ self._restore_model_selection()
+ self.model.refreshed.disconnect()
+ self.refreshed.emit()
+ print("Duration: %.3fs" % (time.time() - time_start))
+
+ # Connect to signal
+ self.model.refreshed.connect(on_refreshed)
+ # Trigger signal before refresh is called
+ self.refresh_triggered.emit()
+ # Refresh model
+ self.model.refresh()
+
+ def refresh(self):
+ self._refresh_model()
+
+ def get_active_asset(self):
+ """Return the asset item of the current selection."""
+ current = self.view.currentIndex()
+ return current.data(self.model.ItemRole)
+
+ def get_active_asset_document(self):
+ """Return the asset document of the current selection."""
+ current = self.view.currentIndex()
+ return current.data(self.model.DocumentRole)
+
+ def get_active_index(self):
+ return self.view.currentIndex()
+
+ def get_selected_assets(self):
+ """Return the documents of selected assets."""
+ selection = self.view.selectionModel()
+ rows = selection.selectedRows()
+ assets = [row.data(self.model.DocumentRole) for row in rows]
+
+        # NOTE: skip None objects; they are assumed to be silos (backwards comp.)
+ return [asset for asset in assets if asset]
+
+ def select_assets(self, assets, expand=True, key="name"):
+ """Select assets by item key.
+
+ Args:
+ assets (list): List of asset values that can be found under
+ specified `key`
+ expand (bool): Whether to also expand to the asset in the view
+ key (string): Key that specifies where to look for `assets` values
+
+ Returns:
+ None
+
+        Default `key` is "name"; in that case `assets` should contain a single
+        asset name or a list of asset names. (It is a good idea to use the
+        "_id" key instead, in which case `assets` must contain `ObjectId`
+        objects.) It is expected that each value in `assets` will be found
+        only once. If `key` and `assets` match more than one asset, only the
+        first one found is selected.
+
+ """
+
+ if not isinstance(assets, (tuple, list)):
+ assets = [assets]
+
+ # Convert to a set so processed values can be discarded while iterating
+ assets = set(assets)
+
+ # Clear selection
+ selection_model = self.view.selectionModel()
+ selection_model.clearSelection()
+
+ # Select
+ mode = selection_model.Select | selection_model.Rows
+ for index in lib.iter_model_rows(
+ self.proxy, column=0, include_root=False
+ ):
+ # stop iteration if there are no assets to process
+ if not assets:
+ break
+
+ value = index.data(self.model.ItemRole).get(key)
+ if value not in assets:
+ continue
+
+ # Remove processed asset
+ assets.discard(value)
+
+ selection_model.select(index, mode)
+ if expand:
+ # Expand parent index
+ self.view.expand(self.proxy.parent(index))
+
+ # Set the currently active index
+ self.view.setCurrentIndex(index)
+
+ def set_loading_state(self, loading, empty):
+ if self.view.is_loading != loading:
+ if loading:
+ self.view.spinner.repaintNeeded.connect(
+ self.view.viewport().update
+ )
+ else:
+ self.view.spinner.repaintNeeded.disconnect()
+
+ self.view.is_loading = loading
+ self.view.is_empty = empty
+
+ def _store_model_selection(self):
+ index = self.view.currentIndex()
+ current = None
+ if index and index.isValid():
+ current = index.data(self.model.ObjectIdRole)
+
+ expanded = set()
+ model = self.view.model()
+ for index in lib.iter_model_rows(
+ model, column=0, include_root=False
+ ):
+ if self.view.isExpanded(index):
+ value = index.data(self.model.ObjectIdRole)
+ expanded.add(value)
+
+ selection_model = self.view.selectionModel()
+
+ selected = set()
+ selected_rows = selection_model.selectedRows()
+ if selected_rows:
+ selected = set(
+ row.data(self.model.ObjectIdRole)
+ for row in selected_rows
+ )
+
+ self.model_selection = {
+ "expanded": expanded,
+ "selected": selected,
+ "current": current
+ }
+
+ def _restore_model_selection(self):
+ model = self.view.model()
+ not_set = object()
+ expanded = self.model_selection.pop("expanded", not_set)
+ selected = self.model_selection.pop("selected", not_set)
+ current = self.model_selection.pop("current", not_set)
+
+ if (
+ expanded is not_set
+ or selected is not_set
+ or current is not_set
+ ):
+ return
+
+ if expanded:
+ for index in lib.iter_model_rows(
+ model, column=0, include_root=False
+ ):
+ is_expanded = index.data(self.model.ObjectIdRole) in expanded
+ self.view.setExpanded(index, is_expanded)
+
+ if not selected and not current:
+ self.set_current_session_asset()
+ return
+
+ current_index = None
+ selected_indexes = []
+ # Go through all indices, select the ones with similar data
+ for index in lib.iter_model_rows(
+ model, column=0, include_root=False
+ ):
+ object_id = index.data(self.model.ObjectIdRole)
+ if object_id in selected:
+ selected_indexes.append(index)
+
+ if not current_index and object_id == current:
+ current_index = index
+
+ if current_index:
+ self.view.setCurrentIndex(current_index)
+
+ if not selected_indexes:
+ return
+ selection_model = self.view.selectionModel()
+ flags = selection_model.Select | selection_model.Rows
+ for index in selected_indexes:
+ # Ensure item is visible
+ self.view.scrollTo(index)
+ selection_model.select(index, flags)
+
+ def set_current_session_asset(self):
+ asset_name = self.dbcon.Session.get("AVALON_ASSET")
+ if asset_name:
+ self.select_assets([asset_name])
+
+
+class OptionalMenu(QtWidgets.QMenu):
+ """A subclass of `QtWidgets.QMenu` to work with `OptionalAction`
+
+ This menu has reimplemented `mouseReleaseEvent`, `mouseMoveEvent` and
+ `leaveEvent` to provide better action highlighting and triggering for
+ actions that are instances of `QtWidgets.QWidgetAction`.
+
+ """
+
+ def mouseReleaseEvent(self, event):
+ """Emit option clicked signal if mouse released on it"""
+ active = self.actionAt(event.pos())
+ if active and active.use_option:
+ option = active.widget.option
+ if option.is_hovered(event.globalPos()):
+ option.clicked.emit()
+ super(OptionalMenu, self).mouseReleaseEvent(event)
+
+ def mouseMoveEvent(self, event):
+ """Add highlight to active action"""
+ active = self.actionAt(event.pos())
+ for action in self.actions():
+ action.set_highlight(action is active, event.globalPos())
+ super(OptionalMenu, self).mouseMoveEvent(event)
+
+ def leaveEvent(self, event):
+ """Remove highlight from all actions"""
+ for action in self.actions():
+ action.set_highlight(False)
+ super(OptionalMenu, self).leaveEvent(event)
+
+
+class OptionalAction(QtWidgets.QWidgetAction):
+ """Menu action with option box
+
+ A menu action like Maya's menu item with option box, implemented by
+ subclassing `QtWidgets.QWidgetAction`.
+
+ """
+
+ def __init__(self, label, icon, use_option, parent):
+ super(OptionalAction, self).__init__(parent)
+ self.label = label
+ self.icon = icon
+ self.use_option = use_option
+ self.option_tip = ""
+ self.optioned = False
+
+ def createWidget(self, parent):
+ widget = OptionalActionWidget(self.label, parent)
+ self.widget = widget
+
+ if self.icon:
+ widget.setIcon(self.icon)
+
+ if self.use_option:
+ widget.option.clicked.connect(self.on_option)
+ widget.option.setToolTip(self.option_tip)
+ else:
+ widget.option.setVisible(False)
+
+ return widget
+
+ def set_option_tip(self, options):
+ sep = "\n\n"
+ make_tip = (lambda opt: opt["name"] + " :\n " + opt["help"])
+ self.option_tip = sep.join(make_tip(opt) for opt in options)
+
+ def on_option(self):
+ self.optioned = True
+
+ def set_highlight(self, state, global_pos=None):
+ body = self.widget.body
+ option = self.widget.option
+
+ role = QtGui.QPalette.Highlight if state else QtGui.QPalette.Window
+ body.setBackgroundRole(role)
+ body.setAutoFillBackground(state)
+
+ if not self.use_option:
+ return
+
+ state = option.is_hovered(global_pos)
+ role = QtGui.QPalette.Highlight if state else QtGui.QPalette.Window
+ option.setBackgroundRole(role)
+ option.setAutoFillBackground(state)
+
+
+class OptionalActionWidget(QtWidgets.QWidget):
+ """Main widget class for `OptionalAction`"""
+
+ def __init__(self, label, parent=None):
+ super(OptionalActionWidget, self).__init__(parent)
+
+ body = QtWidgets.QWidget()
+ body.setStyleSheet("background: transparent;")
+
+ icon = QtWidgets.QLabel()
+ label = QtWidgets.QLabel(label)
+ option = OptionBox(body)
+
+ icon.setFixedSize(24, 16)
+ option.setFixedSize(30, 30)
+
+ layout = QtWidgets.QHBoxLayout(body)
+ layout.setContentsMargins(0, 0, 0, 0)
+ layout.setSpacing(2)
+ layout.addWidget(icon)
+ layout.addWidget(label)
+ layout.addSpacing(6)
+
+ layout = QtWidgets.QHBoxLayout(self)
+ layout.setContentsMargins(6, 1, 2, 1)
+ layout.setSpacing(0)
+ layout.addWidget(body)
+ layout.addWidget(option)
+
+ body.setMouseTracking(True)
+ label.setMouseTracking(True)
+ option.setMouseTracking(True)
+ self.setMouseTracking(True)
+ self.setFixedHeight(32)
+
+ self.icon = icon
+ self.label = label
+ self.option = option
+ self.body = body
+
+ # (NOTE) For removing ugly QLabel shadow FX when highlighted in Nuke.
+ # See https://stackoverflow.com/q/52838690/4145300
+ label.setStyle(QtWidgets.QStyleFactory.create("Plastique"))
+
+ def setIcon(self, icon):
+ pixmap = icon.pixmap(16, 16)
+ self.icon.setPixmap(pixmap)
+
+
+class OptionBox(QtWidgets.QLabel):
+ """Option box widget class for `OptionalActionWidget`"""
+
+ clicked = QtCore.Signal()
+
+ def __init__(self, parent):
+ super(OptionBox, self).__init__(parent)
+
+ self.setAlignment(QtCore.Qt.AlignCenter)
+
+ icon = qtawesome.icon("fa.sticky-note-o", color="#c6c6c6")
+ pixmap = icon.pixmap(18, 18)
+ self.setPixmap(pixmap)
+
+ self.setStyleSheet("background: transparent;")
+
+ def is_hovered(self, global_pos):
+ if global_pos is None:
+ return False
+ pos = self.mapFromGlobal(global_pos)
+ return self.rect().contains(pos)
+
+
+class OptionDialog(QtWidgets.QDialog):
+ """Option dialog shown by option box"""
+
+ def __init__(self, parent=None):
+ super(OptionDialog, self).__init__(parent)
+ self.setModal(True)
+ self._options = dict()
+
+ def create(self, options):
+ parser = qargparse.QArgumentParser(arguments=options)
+
+ decision = QtWidgets.QWidget()
+ accept = QtWidgets.QPushButton("Accept")
+ cancel = QtWidgets.QPushButton("Cancel")
+
+ layout = QtWidgets.QHBoxLayout(decision)
+ layout.addWidget(accept)
+ layout.addWidget(cancel)
+
+ layout = QtWidgets.QVBoxLayout(self)
+ layout.addWidget(parser)
+ layout.addWidget(decision)
+
+ accept.clicked.connect(self.accept)
+ cancel.clicked.connect(self.reject)
+ parser.changed.connect(self.on_changed)
+
+ def on_changed(self, argument):
+ self._options[argument["name"]] = argument.read()
+
+ def parse(self):
+ return self._options.copy()
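+
+
+ # Illustrative wiring of the classes above (a sketch, not part of the
+ # module's public API; the action label and option definitions are
+ # assumptions):
+ #
+ #   menu = OptionalMenu(parent)
+ #   action = OptionalAction("Load Alembic", icon=None, use_option=True,
+ #                           parent=menu)
+ #   action.set_option_tip([{"name": "frame_range", "help": "Clip range"}])
+ #   menu.addAction(action)
+ #   menu.exec_(QtGui.QCursor.pos())
+ #   if action.optioned:
+ #       dialog = OptionDialog(parent)
+ #       dialog.create(options)  # qargparse argument definitions
+ #       if dialog.exec_():
+ #           values = dialog.parse()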
diff --git a/openpype/tools/workfiles/app.py b/openpype/tools/workfiles/app.py
index 3d2633f8dc..6fff0d0278 100644
--- a/openpype/tools/workfiles/app.py
+++ b/openpype/tools/workfiles/app.py
@@ -376,6 +376,9 @@ class TasksWidget(QtWidgets.QWidget):
task (str): Name of the task to select.
"""
+ task_view_model = self._tasks_view.model()
+ if not task_view_model:
+ return
# Clear selection
selection_model = self._tasks_view.selectionModel()
@@ -383,8 +386,8 @@ class TasksWidget(QtWidgets.QWidget):
# Select the task
mode = selection_model.Select | selection_model.Rows
- for row in range(self._tasks_model.rowCount()):
- index = self._tasks_model.index(row, 0)
+ for row in range(task_view_model.rowCount()):
+ index = task_view_model.index(row, 0)
name = index.data(TASK_NAME_ROLE)
if name == task_name:
selection_model.select(index, mode)
diff --git a/openpype/version.py b/openpype/version.py
index 17bd0ff892..f6ace59d7d 100644
--- a/openpype/version.py
+++ b/openpype/version.py
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
-__version__ = "3.4.0-nightly.4"
+__version__ = "3.5.0-nightly.5"
diff --git a/poetry.lock b/poetry.lock
index e011b781c9..10b049cd0a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -144,6 +144,22 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
pytz = ">=2015.7"
+[[package]]
+name = "bcrypt"
+version = "3.2.0"
+description = "Modern password hashing for your software and your servers"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.1"
+six = ">=1.4.1"
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
[[package]]
name = "blessed"
version = "1.18.0"
@@ -313,6 +329,19 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+[[package]]
+name = "dropbox"
+version = "11.20.0"
+description = "Official Dropbox API Client"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+requests = ">=2.16.2"
+six = ">=1.12.0"
+stone = ">=2"
+
[[package]]
name = "enlighten"
version = "1.10.1"
@@ -704,6 +733,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
pyparsing = ">=2.0.2"
+[[package]]
+name = "paramiko"
+version = "2.7.2"
+description = "SSH2 protocol library"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+bcrypt = ">=3.1.3"
+cryptography = ">=2.5"
+pynacl = ">=1.0.1"
+
+[package.extras]
+all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"]
+gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+invoke = ["invoke (>=1.3)"]
+
[[package]]
name = "parso"
version = "0.8.2"
@@ -749,6 +797,14 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
[package.extras]
dev = ["pre-commit", "tox"]
+[[package]]
+name = "ply"
+version = "3.11"
+description = "Python Lex & Yacc"
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "prefixed"
version = "0.3.2"
@@ -888,6 +944,22 @@ srv = ["dnspython (>=1.16.0,<1.17.0)"]
tls = ["ipaddress"]
zstd = ["zstandard"]
+[[package]]
+name = "pynacl"
+version = "1.4.0"
+description = "Python binding to the Networking and Cryptography (NaCl) library"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.dependencies]
+cffi = ">=1.4.1"
+six = "*"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
+tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
+
[[package]]
name = "pynput"
version = "1.7.3"
@@ -977,6 +1049,17 @@ category = "main"
optional = false
python-versions = ">=3.5"
+[[package]]
+name = "pysftp"
+version = "0.2.9"
+description = "A friendly face on SFTP"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+paramiko = ">=1.17"
+
[[package]]
name = "pytest"
version = "6.2.4"
@@ -1069,7 +1152,7 @@ python-versions = "*"
[[package]]
name = "pywin32"
-version = "300"
+version = "301"
description = "Python for Window Extensions"
category = "main"
optional = false
@@ -1341,6 +1424,18 @@ sphinxcontrib-serializinghtml = "*"
lint = ["flake8"]
test = ["pytest", "sqlalchemy", "whoosh", "sphinx"]
+[[package]]
+name = "stone"
+version = "3.2.1"
+description = "Stone is an interface description language (IDL) for APIs."
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+ply = ">=3.4"
+six = ">=1.3.0"
+
[[package]]
name = "termcolor"
version = "1.1.0"
@@ -1466,7 +1561,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt
[metadata]
lock-version = "1.1"
python-versions = "3.7.*"
-content-hash = "8875d530ae66f9763b5b0cb84d9d35edc184ef5c141b63d38bf1ff5a1226e556"
+content-hash = "ff2bfa35a7304378917a0c25d7d7af9f81a130288d95789bdf7429f071e80b69"
[metadata.files]
acre = []
@@ -1553,6 +1648,15 @@ babel = [
{file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"},
{file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"},
]
+bcrypt = [
+ {file = "bcrypt-3.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c95d4cbebffafcdd28bd28bb4e25b31c50f6da605c81ffd9ad8a3d1b2ab7b1b6"},
+ {file = "bcrypt-3.2.0-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:63d4e3ff96188e5898779b6057878fecf3f11cfe6ec3b313ea09955d587ec7a7"},
+ {file = "bcrypt-3.2.0-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:cd1ea2ff3038509ea95f687256c46b79f5fc382ad0aa3664d200047546d511d1"},
+ {file = "bcrypt-3.2.0-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:cdcdcb3972027f83fe24a48b1e90ea4b584d35f1cc279d76de6fc4b13376239d"},
+ {file = "bcrypt-3.2.0-cp36-abi3-win32.whl", hash = "sha256:a67fb841b35c28a59cebed05fbd3e80eea26e6d75851f0574a9273c80f3e9b55"},
+ {file = "bcrypt-3.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:81fec756feff5b6818ea7ab031205e1d323d8943d237303baca2c5f9c7846f34"},
+ {file = "bcrypt-3.2.0.tar.gz", hash = "sha256:5b93c1726e50a93a033c36e5ca7fdcd29a5c7395af50a6892f5d9e7c6cfbfb29"},
+]
blessed = [
{file = "blessed-1.18.0-py2.py3-none-any.whl", hash = "sha256:5b5e2f0563d5a668c282f3f5946f7b1abb70c85829461900e607e74d7725106e"},
{file = "blessed-1.18.0.tar.gz", hash = "sha256:1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755"},
@@ -1582,24 +1686,36 @@ cffi = [
{file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"},
{file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"},
{file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"},
+ {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f"},
+ {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed"},
+ {file = "cffi-1.14.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55"},
{file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"},
{file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"},
{file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"},
{file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"},
{file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"},
{file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"},
+ {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69"},
+ {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05"},
+ {file = "cffi-1.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc"},
{file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"},
{file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"},
{file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"},
{file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"},
{file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"},
{file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"},
+ {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373"},
+ {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f"},
+ {file = "cffi-1.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76"},
{file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"},
{file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"},
{file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"},
{file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"},
{file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"},
{file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"},
+ {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0"},
+ {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333"},
+ {file = "cffi-1.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7"},
{file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"},
{file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"},
{file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"},
@@ -1692,8 +1808,10 @@ cryptography = [
{file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"},
{file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"},
{file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"},
+ {file = "cryptography-3.4.7-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b01fd6f2737816cb1e08ed4807ae194404790eac7ad030b34f2ce72b332f5586"},
{file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"},
{file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"},
+ {file = "cryptography-3.4.7-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:bf40af59ca2465b24e54f671b2de2c59257ddc4f7e5706dbd6930e26823668d3"},
{file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"},
]
cx-freeze = [
@@ -1730,6 +1848,11 @@ docutils = [
{file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
]
+dropbox = [
+ {file = "dropbox-11.20.0-py2-none-any.whl", hash = "sha256:0926aab25445fe78b0284e0b86f4126ec4e5e2bf6cd2ac8562002008a21073b8"},
+ {file = "dropbox-11.20.0-py3-none-any.whl", hash = "sha256:f2106aa566f9e3c175879c226c60b7089a39099b228061acbb7258670f6b859c"},
+ {file = "dropbox-11.20.0.tar.gz", hash = "sha256:1aa351ec8bbb11cf3560e731b81d25f39c7edcb5fa92c06c5d68866cb9f90d54"},
+]
enlighten = [
{file = "enlighten-1.10.1-py2.py3-none-any.whl", hash = "sha256:3d6c3eec8cf3eb626ee7b65eddc1b3e904d01f4547a2b9fe7f1da8892a0297e8"},
{file = "enlighten-1.10.1.tar.gz", hash = "sha256:3391916586364aedced5d6926482b48745e4948f822de096d32258ba238ea984"},
@@ -1852,12 +1975,22 @@ log4mongo = [
{file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"},
]
markupsafe = [
+ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
+ {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"},
+ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
{file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
@@ -1866,14 +1999,21 @@ markupsafe = [
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"},
+ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
{file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"},
{file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
{file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"},
+ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"},
{file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
{file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
{file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
@@ -1883,6 +2023,9 @@ markupsafe = [
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"},
+ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"},
{file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
{file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
{file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
@@ -1935,6 +2078,10 @@ packaging = [
{file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"},
{file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"},
]
+paramiko = [
+ {file = "paramiko-2.7.2-py2.py3-none-any.whl", hash = "sha256:4f3e316fef2ac628b05097a637af35685183111d4bc1b5979bd397c2ab7b5898"},
+ {file = "paramiko-2.7.2.tar.gz", hash = "sha256:7f36f4ba2c0d81d219f4595e35f70d56cc94f9ac40a6acdf51d6ca210ce65035"},
+]
parso = [
{file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"},
{file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"},
@@ -1983,6 +2130,10 @@ pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
+ply = [
+ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
+ {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
+]
prefixed = [
{file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"},
{file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"},
@@ -2006,9 +2157,13 @@ protobuf = [
{file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"},
{file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"},
{file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"},
+ {file = "protobuf-3.17.3-cp38-cp38-win32.whl", hash = "sha256:59e5cf6b737c3a376932fbfb869043415f7c16a0cf176ab30a5bbc419cd709c1"},
+ {file = "protobuf-3.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ebcb546f10069b56dc2e3da35e003a02076aaa377caf8530fe9789570984a8d2"},
{file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"},
{file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"},
{file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"},
+ {file = "protobuf-3.17.3-cp39-cp39-win32.whl", hash = "sha256:a38bac25f51c93e4be4092c88b2568b9f407c27217d3dd23c7a57fa522a17554"},
+ {file = "protobuf-3.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:85d6303e4adade2827e43c2b54114d9a6ea547b671cb63fafd5011dc47d0e13d"},
{file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"},
{file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"},
]
@@ -2144,6 +2299,26 @@ pymongo = [
{file = "pymongo-3.11.4-py2.7-macosx-10.14-intel.egg", hash = "sha256:506a6dab4c7ffdcacdf0b8e70bd20eb2e77fa994519547c9d88d676400fcad58"},
{file = "pymongo-3.11.4.tar.gz", hash = "sha256:539d4cb1b16b57026999c53e5aab857fe706e70ae5310cc8c232479923f932e6"},
]
+pynacl = [
+ {file = "PyNaCl-1.4.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff"},
+ {file = "PyNaCl-1.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514"},
+ {file = "PyNaCl-1.4.0-cp27-cp27m-win32.whl", hash = "sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574"},
+ {file = "PyNaCl-1.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80"},
+ {file = "PyNaCl-1.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7"},
+ {file = "PyNaCl-1.4.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122"},
+ {file = "PyNaCl-1.4.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d"},
+ {file = "PyNaCl-1.4.0-cp35-abi3-win32.whl", hash = "sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634"},
+ {file = "PyNaCl-1.4.0-cp35-abi3-win_amd64.whl", hash = "sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6"},
+ {file = "PyNaCl-1.4.0-cp35-cp35m-win32.whl", hash = "sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4"},
+ {file = "PyNaCl-1.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25"},
+ {file = "PyNaCl-1.4.0-cp36-cp36m-win32.whl", hash = "sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4"},
+ {file = "PyNaCl-1.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6"},
+ {file = "PyNaCl-1.4.0-cp37-cp37m-win32.whl", hash = "sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f"},
+ {file = "PyNaCl-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f"},
+ {file = "PyNaCl-1.4.0-cp38-cp38-win32.whl", hash = "sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96"},
+ {file = "PyNaCl-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420"},
+ {file = "PyNaCl-1.4.0.tar.gz", hash = "sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505"},
+]
pynput = [
{file = "pynput-1.7.3-py2.py3-none-any.whl", hash = "sha256:fea5777454f896bd79d35393088cd29a089f3b2da166f0848a922b1d5a807d4f"},
{file = "pynput-1.7.3-py3.8.egg", hash = "sha256:6626e8ea9ca482bb5628a7169e1193824e382c4ad3053e40f4f24f41ee7b41c9"},
@@ -2151,6 +2326,7 @@ pynput = [
]
pyobjc-core = [
{file = "pyobjc-core-7.3.tar.gz", hash = "sha256:5081aedf8bb40aac1a8ad95adac9e44e148a882686ded614adf46bb67fd67574"},
+ {file = "pyobjc_core-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1f1e6b457127cbf2b5bd2b94520a7c89fb590b739911eadb2b0499a3a5b0e6f"},
{file = "pyobjc_core-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e93ad769a20b908778fe950f62a843a6d8f0fa71996e5f3cc9fab5ae7d17771"},
{file = "pyobjc_core-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f63fd37bbf3785af4ddb2f86cad5ca81c62cfc7d1c0099637ca18343c3656c1"},
{file = "pyobjc_core-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9b1311f72f2e170742a7ee3a8149f52c35158dc024a21e88d6f1e52ba5d718b"},
@@ -2159,6 +2335,7 @@ pyobjc-core = [
]
pyobjc-framework-cocoa = [
{file = "pyobjc-framework-Cocoa-7.3.tar.gz", hash = "sha256:b18d05e7a795a3455ad191c3e43d6bfa673c2a4fd480bb1ccf57191051b80b7e"},
+ {file = "pyobjc_framework_Cocoa-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1e31376806e5de883a1d7c7c87d9ff2a8b09fc05d267e0dfce6e42409fb70c67"},
{file = "pyobjc_framework_Cocoa-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9edffdfa6dd1f71f21b531c3e61fdd3e4d5d3bf6c5a528c98e88828cd60bac11"},
{file = "pyobjc_framework_Cocoa-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35a6340437a4e0109a302150b7d1f6baf57004ccf74834f9e6062fcafe2fd8d7"},
{file = "pyobjc_framework_Cocoa-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c3886f2608ab3ed02482f8b2ebf9f782b324c559e84b52cfd92dba8a1109872"},
@@ -2167,6 +2344,7 @@ pyobjc-framework-cocoa = [
]
pyobjc-framework-quartz = [
{file = "pyobjc-framework-Quartz-7.3.tar.gz", hash = "sha256:98812844c34262def980bdf60923a875cd43428a8375b6fd53bd2cd800eccf0b"},
+ {file = "pyobjc_framework_Quartz-7.3-1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1139bc6874c0f8b58f0b8602015e0994198bc506a6bcec1071208de32b55ed26"},
{file = "pyobjc_framework_Quartz-7.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ef18f5a16511ded65980bf4f5983ea5d35c88224dbad1b3112abd29c60413ea"},
{file = "pyobjc_framework_Quartz-7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b41eec8d4b10c7c7e011e2f9051367f5499ef315ba52dfbae573c3a2e05469c"},
{file = "pyobjc_framework_Quartz-7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c65456ed045dfe1711d0298734e5a3ad670f8c770f7eb3b19979256c388bdd2"},
@@ -2212,6 +2390,9 @@ pyqt5-sip = [
pyrsistent = [
{file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"},
]
+pysftp = [
+ {file = "pysftp-0.2.9.tar.gz", hash = "sha256:fbf55a802e74d663673400acd92d5373c1c7ee94d765b428d9f977567ac4854a"},
+]
pytest = [
{file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"},
{file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"},
@@ -2240,16 +2421,16 @@ pytz = [
{file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"},
]
pywin32 = [
- {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"},
- {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"},
- {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"},
- {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"},
- {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"},
- {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"},
- {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"},
- {file = "pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"},
- {file = "pywin32-300-cp39-cp39-win32.whl", hash = "sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"},
- {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"},
+ {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"},
+ {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"},
+ {file = "pywin32-301-cp36-cp36m-win32.whl", hash = "sha256:c866f04a182a8cb9b7855de065113bbd2e40524f570db73ef1ee99ff0a5cc2f0"},
+ {file = "pywin32-301-cp36-cp36m-win_amd64.whl", hash = "sha256:dafa18e95bf2a92f298fe9c582b0e205aca45c55f989937c52c454ce65b93c78"},
+ {file = "pywin32-301-cp37-cp37m-win32.whl", hash = "sha256:98f62a3f60aa64894a290fb7494bfa0bfa0a199e9e052e1ac293b2ad3cd2818b"},
+ {file = "pywin32-301-cp37-cp37m-win_amd64.whl", hash = "sha256:fb3b4933e0382ba49305cc6cd3fb18525df7fd96aa434de19ce0878133bf8e4a"},
+ {file = "pywin32-301-cp38-cp38-win32.whl", hash = "sha256:88981dd3cfb07432625b180f49bf4e179fb8cbb5704cd512e38dd63636af7a17"},
+ {file = "pywin32-301-cp38-cp38-win_amd64.whl", hash = "sha256:8c9d33968aa7fcddf44e47750e18f3d034c3e443a707688a008a2e52bbef7e96"},
+ {file = "pywin32-301-cp39-cp39-win32.whl", hash = "sha256:595d397df65f1b2e0beaca63a883ae6d8b6df1cdea85c16ae85f6d2e648133fe"},
+ {file = "pywin32-301-cp39-cp39-win_amd64.whl", hash = "sha256:87604a4087434cd814ad8973bd47d6524bd1fa9e971ce428e76b62a5e0860fdf"},
]
pywin32-ctypes = [
{file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"},
@@ -2339,6 +2520,11 @@ sphinxcontrib-websupport = [
{file = "sphinxcontrib-websupport-1.2.4.tar.gz", hash = "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232"},
{file = "sphinxcontrib_websupport-1.2.4-py2.py3-none-any.whl", hash = "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7"},
]
+stone = [
+ {file = "stone-3.2.1-py2-none-any.whl", hash = "sha256:2a50866528f60cc7cedd010def733e8ae9d581d17f967278a08059bffaea3c57"},
+ {file = "stone-3.2.1-py3-none-any.whl", hash = "sha256:76235137c09ee88aa53e8c1e666819f6c20ac8064c4ac6c4ee4194eac0e3b7af"},
+ {file = "stone-3.2.1.tar.gz", hash = "sha256:9bc78b40143b4ef33bf569e515408c2996ffebefbb1a897616ebe8aa6f2d7e75"},
+]
termcolor = [
{file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"},
]
diff --git a/pyproject.toml b/pyproject.toml
index e376986606..fb47c7eff6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -55,7 +55,7 @@ speedcopy = "^2.1"
six = "^1.15"
semver = "^2.13.0" # for version resolution
wsrpc_aiohttp = "^3.1.1" # websocket server
-pywin32 = { version = "300", markers = "sys_platform == 'win32'" }
+pywin32 = { version = "301", markers = "sys_platform == 'win32'" }
jinxed = [
{ version = "^1.0.1", markers = "sys_platform == 'darwin'" },
{ version = "^1.0.1", markers = "sys_platform == 'linux'" }
@@ -63,6 +63,8 @@ jinxed = [
python3-xlib = { version="*", markers = "sys_platform == 'linux'"}
enlighten = "^1.9.0"
slack-sdk = "^3.6.0"
+pysftp = "^0.2.9"
+dropbox = "^11.20.0"
[tool.poetry.dev-dependencies]
flake8 = "^3.7"
diff --git a/repos/avalon-core b/repos/avalon-core
index f48fce09c0..4b80f81e66 160000
--- a/repos/avalon-core
+++ b/repos/avalon-core
@@ -1 +1 @@
-Subproject commit f48fce09c0986c1fd7f6731de33907be46b436c5
+Subproject commit 4b80f81e66aca593784be8b299110a0b6541276f
diff --git a/start.py b/start.py
index 00f9a50cbb..ada613b4eb 100644
--- a/start.py
+++ b/start.py
@@ -96,12 +96,12 @@ Attributes:
import os
import re
import sys
+import platform
import traceback
import subprocess
import site
from pathlib import Path
-
# OPENPYPE_ROOT is variable pointing to build (or code) directory
# WARNING `OPENPYPE_ROOT` must be defined before igniter import
# - igniter changes cwd which cause that filepath of this script won't lead
@@ -189,6 +189,7 @@ else:
import igniter # noqa: E402
from igniter import BootstrapRepos # noqa: E402
from igniter.tools import (
+ get_openpype_global_settings,
get_openpype_path_from_db,
validate_mongo_connection
) # noqa
@@ -274,6 +275,35 @@ def run(arguments: list, env: dict = None) -> int:
return p.returncode
+def run_disk_mapping_commands(mongo_url):
+ """ Run disk mapping command
+
+ Used to map shared disk for OP to pull codebase.
+ """
+ settings = get_openpype_global_settings(mongo_url)
+
+ low_platform = platform.system().lower()
+ disk_mapping = settings.get("disk_mapping")
+ if not disk_mapping:
+ return
+
+ for mapping in disk_mapping.get(low_platform) or []:
+ source, destination = mapping
+
+ args = ["subst", destination.rstrip('/'), source.rstrip('/')]
+ _print("disk mapping args:: {}".format(args))
+ try:
+ output = subprocess.Popen(args)
+ # Wait for the process, otherwise returncode is still None here
+ output.wait()
+ if output.returncode != 0:
+ exc_msg = "Executing args was not successful: \"{}\"".format(
+ args)
+
+ raise RuntimeError(exc_msg)
+ except TypeError:
+ _print("Error in mapping drive")
+ raise
+
+
def set_avalon_environments():
"""Set avalon specific environments.
@@ -339,6 +369,89 @@ def set_modules_environments():
os.environ.update(env)
+def is_tool(name):
+ """Return True if executable `name` exists and can be launched."""
+ # `os.errno` was never importable this way and is gone in Python 3.7+,
+ # plain `errno` is always available
+ import errno
+
+ try:
+ devnull = open(os.devnull, "w")
+ subprocess.Popen(
+ [name], stdout=devnull, stderr=devnull
+ ).communicate()
+ except OSError as exc:
+ if exc.errno == errno.ENOENT:
+ return False
+ return True
+
+
+def _startup_validations():
+ """Validations before OpenPype starts."""
+ try:
+ _validate_thirdparty_binaries()
+ except Exception as exc:
+ if os.environ.get("OPENPYPE_HEADLESS_MODE"):
+ raise
+
+ import tkinter
+ from tkinter.messagebox import showerror
+
+ root = tkinter.Tk()
+ root.attributes("-alpha", 0.0)
+ root.wm_state("iconic")
+ if platform.system().lower() != "windows":
+ root.withdraw()
+
+ showerror(
+ "Startup validations didn't pass",
+ str(exc)
+ )
+ root.withdraw()
+ sys.exit(1)
+
+
+def _validate_thirdparty_binaries():
+ """Check existence of thirdpart executables."""
+ low_platform = platform.system().lower()
+ binary_vendors_dir = os.path.join(
+ os.environ["OPENPYPE_ROOT"],
+ "vendor",
+ "bin"
+ )
+
+ error_msg = (
+ "Missing binary dependency {}. Please fetch thirdparty dependencies."
+ )
+ # Validate existence of FFmpeg
+ ffmpeg_dir = os.path.join(binary_vendors_dir, "ffmpeg", low_platform)
+ if low_platform == "windows":
+ ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
+ ffmpeg_executable = os.path.join(ffmpeg_dir, "ffmpeg")
+ if not is_tool(ffmpeg_executable):
+ raise RuntimeError(error_msg.format("FFmpeg"))
+
+ # Validate existence of OpenImageIO (not on macOS)
+ oiio_tool_path = None
+ if low_platform == "linux":
+ oiio_tool_path = os.path.join(
+ binary_vendors_dir,
+ "oiio",
+ low_platform,
+ "bin",
+ "oiiotool"
+ )
+ elif low_platform == "windows":
+ oiio_tool_path = os.path.join(
+ binary_vendors_dir,
+ "oiio",
+ low_platform,
+ "oiiotool"
+ )
+ if oiio_tool_path is not None and not is_tool(oiio_tool_path):
+ raise RuntimeError(error_msg.format("OpenImageIO"))
+
+
def _process_arguments() -> tuple:
"""Process command line arguments.
@@ -767,6 +880,11 @@ def boot():
# ------------------------------------------------------------------------
os.environ["OPENPYPE_ROOT"] = OPENPYPE_ROOT
+ # ------------------------------------------------------------------------
+ # Do necessary startup validations
+ # ------------------------------------------------------------------------
+ _startup_validations()
+
# ------------------------------------------------------------------------
# Play animation
# ------------------------------------------------------------------------
@@ -806,6 +924,9 @@ def boot():
os.environ["OPENPYPE_MONGO"] = openpype_mongo
os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" # name of Pype database
+ _print(">>> run disk mapping command ...")
+ run_disk_mapping_commands(openpype_mongo)
+
# Get openpype path from database and set it to environment so openpype can
# find its versions there and bootstrap them.
openpype_path = get_openpype_path_from_db(openpype_mongo)
diff --git a/tests/README.md b/tests/README.md
index e69de29bb2..6317b2ab3c 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -0,0 +1,25 @@
+Automatic tests for OpenPype
+============================
+Structure:
+- integration - end to end tests, slow (see README.md in the integration folder for more info)
+ - openpype/modules/MODULE_NAME - structure follow directory structure in code base
+ - fixture - sample data `(MongoDB dumps, test files etc.)`
+ - `tests.py` - single or more pytest files for MODULE_NAME
+- unit - quick unit test
+ - MODULE_NAME
+ - fixture
+ - `tests.py`
+
+How to run:
+----------
+- a single test class can be run directly by PyCharm and its pytest runner
+- OR
+- use the OpenPype command 'runtests' from the command line
+-- `${OPENPYPE_ROOT}/start.py runtests`
+
+By default, this command will run all tests in ${OPENPYPE_ROOT}/tests.
+
+A specific location can be provided to this command as an argument, either as an absolute path or as a path relative to ${OPENPYPE_ROOT}.
+(eg. `${OPENPYPE_ROOT}/start.py runtests ../tests/integration` will trigger only tests in the `integration` folder.)
+
+See `${OPENPYPE_ROOT}/cli.py:runtests` for other arguments.
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integration/README.md b/tests/integration/README.md
new file mode 100644
index 0000000000..81c07ec50c
--- /dev/null
+++ b/tests/integration/README.md
@@ -0,0 +1,37 @@
+Integration test for OpenPype
+=============================
+Contains end-to-end tests for automatic testing of OP.
+
+Should run headless publish on all hosts to check basic publish use cases automatically
+to limit regression issues.
+
+How to create test for publishing from host
+------------------------------------------
+- Extend PublishTest
+- Use `resources\test_data.zip` skeleton file as a template for testing input data
+- Put workfile into `test_data.zip/input/workfile`
+- If you require DB dumps other than the base ones, provide them in `test_data.zip/input/dumps`
+-- (Check the commented code in `db_handler.py` for how to dump a specific DB. Currently all collections are dumped.)
+- Implement `last_workfile_path`
+- `startup_scripts` - must point the host to a startup script saved in `test_data.zip/input/startup`
+ -- Script must contain something like
+```
+import openpype
+import pyblish.util
+from avalon import api, HOST
+
+api.install(HOST)
+pyblish.util.publish()
+
+EXIT_APP (command to exit host)
+```
+(Install and publish methods must be triggered only AFTER host app is fully initialized!)
+- Zip `test_data.zip`, name it descriptively, upload it to Google Drive, right click - `Get link`, copy the hash id
+- Put this hash id and zip file name into TEST_FILES [(HASH_ID, FILE_NAME, MD5_OPTIONAL)]. If you want to check the MD5 of the downloaded
+file, provide the md5 value of the zipped file.
+- Implement any assert checks you need in extended class
+- Run test class manually (via Pycharm or pytest runner (TODO))
+- If you want the test to compare expected files with the published ones, set PERSIST to True and run the test manually
+ -- Locate the temporary `publish` subfolder of the temporary folder (found in the debugging console log)
+ -- Copy the whole folder content into the `expected` subfolder of the .zip file
+ -- By default tests compare only the structure of `expected` and the published output (eg. if you want to save space, replace published files with empty files, but keep the expected names!)
+ -- Zip and upload again, change PERSIST back to False
\ No newline at end of file
diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py
new file mode 100644
index 0000000000..1babf30029
--- /dev/null
+++ b/tests/integration/hosts/maya/test_publish_in_maya.py
@@ -0,0 +1,103 @@
+import pytest
+import os
+import shutil
+
+from tests.lib.testing_classes import PublishTest
+
+
+class TestPublishInMaya(PublishTest):
+ """Basic test case for publishing in Maya
+
+ Shouldn't be run standalone, only via the 'runtests' pype command!
+
+ Uses the generic TestCase to prepare fixtures for test data, testing DBs
+ and env vars.
+
+ Opens Maya, runs publish on the prepared workfile.
+
+ Then checks the content of the DB (whether subset, version and
+ representations were created).
+ Checks the tmp folder for whether all expected files were published.
+
+ """
+ PERSIST = True
+
+ TEST_FILES = [
+ ("1pOwjA_VVBc6ooTZyFxtAwLS2KZHaBlkY", "test_maya_publish.zip", "")
+ ]
+
+ APP = "maya"
+ APP_VARIANT = "2019"
+
+ APP_NAME = "{}/{}".format(APP, APP_VARIANT)
+
+ TIMEOUT = 120 # publish timeout
+
+ @pytest.fixture(scope="module")
+ def last_workfile_path(self, download_test_data):
+ """Get last_workfile_path from source data.
+
+ Maya expects workfile in proper folder, so copy is done first.
+ """
+ src_path = os.path.join(download_test_data,
+ "input",
+ "workfile",
+ "test_project_test_asset_TestTask_v001.mb")
+ dest_folder = os.path.join(download_test_data,
+ self.PROJECT,
+ self.ASSET,
+ "work",
+ self.TASK)
+ os.makedirs(dest_folder)
+ dest_path = os.path.join(dest_folder,
+ "test_project_test_asset_TestTask_v001.mb")
+ shutil.copy(src_path, dest_path)
+
+ yield dest_path
+
+ @pytest.fixture(scope="module")
+ def startup_scripts(self, monkeypatch_session, download_test_data):
+ """Points Maya to userSetup file from input data"""
+ startup_path = os.path.join(download_test_data,
+ "input",
+ "startup")
+ original_pythonpath = os.environ.get("PYTHONPATH") or ""
+ monkeypatch_session.setenv("PYTHONPATH",
+ "{}{}{}".format(startup_path,
+ os.pathsep,
+ original_pythonpath))
+
+ def test_db_asserts(self, dbcon, publish_finished):
+ """Host and input data dependent expected results in DB."""
+ print("test_db_asserts")
+ assert 5 == dbcon.count_documents({"type": "version"}), \
+ "Not expected no of versions"
+
+ assert 0 == dbcon.count_documents({"type": "version",
+ "name": {"$ne": 1}}), \
+ "Only versions with 1 expected"
+
+ assert 1 == dbcon.count_documents({"type": "subset",
+ "name": "modelMain"}), \
+ "modelMain subset must be present"
+
+ assert 1 == dbcon.count_documents({"type": "subset",
+ "name": "workfileTest_task"}), \
+ "workfileTest_task subset must be present"
+
+ assert 11 == dbcon.count_documents({"type": "representation"}), \
+ "Not expected no of representations"
+
+ assert 2 == dbcon.count_documents({"type": "representation",
+ "context.subset": "modelMain",
+ "context.ext": "abc"}), \
+ "Not expected no of representations with ext 'abc'"
+
+ assert 2 == dbcon.count_documents({"type": "representation",
+ "context.subset": "modelMain",
+ "context.ext": "ma"}), \
+ "Not expected no of representations with ext 'abc'"
+
+
+if __name__ == "__main__":
+ test_case = TestPublishInMaya()
diff --git a/tests/lib/README.md b/tests/lib/README.md
new file mode 100644
index 0000000000..0384cd2ff0
--- /dev/null
+++ b/tests/lib/README.md
@@ -0,0 +1,46 @@
+Automatic testing
+-----------------
+Folder for libs and tooling for automatic testing.
+
+- db_handler.py - class for preparation of test DB
+ - dumps DB(s) to BSON (mongodump)
+ - loads dump(s) to new DB (mongorestore)
+ - loads sql file(s) to DB (mongoimport)
+ - deletes test DB
+
+- file_handler.py - class to download test data from GDrive
+ - downloads a (list of) file(s) from GDrive
+ - checks file integrity with an MD5 hash
+ - unzips if the file is a zip
+
+- testing_classes.py - base classes to use for testing
+ - hold all env vars necessary for running (OPENPYPE_MONGO ...)
+ - implement reusable fixtures to:
+ - load test data (uses `file_handler`)
+ - prepare DB (uses `db_handler`)
+ - temporarily modify env vars for testing
+
+ Should be used as a skeleton to create new test cases.
+
+
+Test data
+---------
+Each test class can provide test file(s) by adding them to `TEST_FILES` as
+tuples in the form ('GDRIVE_FILE_ID', 'ACTUAL_FILE_NAME', 'MD5HASH'); see the
+sketch below.
+
+GDRIVE_FILE_ID can be pulled from the shareable link in the Google Drive app.
+
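+A minimal sketch of a test class providing its own test data (the class name
+and the Google Drive file id are placeholders):
+
+```python
+from tests.lib.testing_classes import ModuleUnitTest
+
+
+class TestMyModule(ModuleUnitTest):
+    # (GDRIVE_FILE_ID, ACTUAL_FILE_NAME, MD5HASH)
+    TEST_FILES = [
+        ("GDRIVE_FILE_ID_PLACEHOLDER", "test_my_module.zip", "")
+    ]
+```
+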
+Currently it is expected that the test file is a zip file with this structure:
+- expected - expected published files (not implemented yet)
+- input
+ - data - test data (workfiles, images etc.)
+ - dumps - folder with BSON dumps (from `mongodump`)
+ - env_vars
+ - env_var.json - dictionary with environment variables {key: value}
+ - json - json files to load with `mongoimport` (human readable)
+
+
+Example
+-------
+See `tests/unit/openpype/modules/sync_server/test_site_operations.py` for an example usage of the implemented classes.
\ No newline at end of file
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py
new file mode 100644
index 0000000000..9be70895da
--- /dev/null
+++ b/tests/lib/db_handler.py
@@ -0,0 +1,231 @@
+"""
+ Helper class for automatic testing, provides dump and restore via command
+ line utilities.
+
+ Expects mongodump, mongoimport and mongorestore to be present in PATH.
+"""
+import os
+import pymongo
+import subprocess
+
+
+class DBHandler:
+
+ def __init__(self, uri=None, host=None, port=None,
+ user=None, password=None):
+ """Provide either 'uri' or the separate credential parts."""
+ self.uri = None
+ if uri:
+ self.uri = uri
+ if host:
+ if all([user, password]):
+ host = "{}:{}@{}".format(user, password, host)
+ self.uri = 'mongodb://{}:{}'.format(host, port or 27017)
+
+ assert self.uri, "Must have uri to MongoDB"
+ # use the resolved uri (works for both 'uri' and host based init)
+ self.client = pymongo.MongoClient(self.uri)
+ self.db = None
+
+ def setup_empty(self, name):
+ # just keep a reference; MongoDB creates the DB lazily on first write
+ self.db = self.client[name]
+
+ def setup_from_sql(self, db_name, sql_dir, collection=None,
+ drop=True, mode=None):
+ """
+ Restores 'db_name' from json files in 'sql_dir'.
+
+ Works with directory with .json files,
+ if 'collection' arg is empty, name
+ of .json file is used as name of target collection.
+
+ Args:
+ db_name (str): target DB name
+ sql_dir (str): folder with json files
+ collection (str): if all sql files are meant for a single coll.
+ drop (bool): True to drop the whole collection first
+ mode (str): "insert" - fails on duplicates
+ "upsert" - modifies existing
+ "merge" - updates existing
+ "delete" - removes documents present in the file from DB
+ """
+ if not os.path.exists(sql_dir):
+ raise RuntimeError(
+ "Backup folder {} doesn't exist".format(sql_dir))
+
+ for (dirpath, _dirnames, filenames) in os.walk(sql_dir):
+ for file_name in filenames:
+ sql_url = os.path.join(dirpath, file_name)
+ query = self._import_query(self.uri, sql_url,
+ db_name=db_name,
+ collection=collection,
+ drop=drop,
+ mode=mode)
+
+ print("mongoimport query:: {}".format(query))
+ subprocess.run(query)
+
+ def setup_from_sql_file(self, db_name, sql_url,
+ collection=None, drop=True, mode=None):
+ """
+ Restores 'db_name' from 'sql_url'.
+
+ Works with single .json file.
+ If 'collection' arg is empty, name
+ of .json file is used as name of target collection.
+
+ Args:
+ db_name (str): target DB name
+ sql_url (str): path to a single json file
+ collection (str): name of target collection
+ drop (bool): True to drop the collection first
+ mode (str): "insert" - fails on duplicates
+ "upsert" - modifies existing
+ "merge" - updates existing
+ "delete" - removes documents present in the file from DB
+ """
+ if not os.path.exists(sql_url):
+ raise RuntimeError(
+ "Sql file {} doesn't exist".format(sql_url))
+
+ query = self._import_query(self.uri, sql_url,
+ db_name=db_name,
+ collection=collection,
+ drop=drop,
+ mode=mode)
+
+ print("mongoimport query:: {}".format(query))
+ subprocess.run(query)
+
+ def setup_from_dump(self, db_name, dump_dir, overwrite=False,
+ collection=None, db_name_out=None):
+ """
+ Restores 'db_name' from 'dump_dir'.
+
+ Works with BSON folders exported by mongodump
+
+ Args:
+ db_name (str): source DB name
+ dump_dir (str): folder with dumped subfolders
+ overwrite (bool): True if overwrite target
+ collection (str): name of source project
+ db_name_out (str): name of target DB, if empty restores to
+ source 'db_name'
+ """
+ db_name_out = db_name_out or db_name
+ if self._db_exists(db_name_out) and not overwrite:
+ raise RuntimeError("DB {} already exists. ".format(db_name_out) +
+ "Run with overwrite=True")
+
+ dir_path = os.path.join(dump_dir, db_name)
+ if not os.path.exists(dir_path):
+ raise RuntimeError(
+ "Backup folder {} doesn't exist".format(dir_path))
+
+ query = self._restore_query(self.uri, dump_dir,
+ db_name=db_name, db_name_out=db_name_out,
+ collection=collection)
+ print("mongorestore query:: {}".format(query))
+ subprocess.run(query)
+
+ def teardown(self, db_name):
+ """Drops 'db_name' if exists."""
+ if not self._db_exists(db_name):
+ print("{} doesn't exist".format(db_name))
+ return
+
+ print("Dropping {} database".format(db_name))
+ self.client.drop_database(db_name)
+
+ def backup_to_dump(self, db_name, dump_dir, overwrite=False):
+ """
+ Helper method for running mongodump for specific 'db_name'
+ """
+ if not self._db_exists(db_name) and not overwrite:
+ raise RuntimeError("DB {} doesn't exist".format(db_name))
+
+ dir_path = os.path.join(dump_dir, db_name)
+ if os.path.exists(dir_path) and not overwrite:
+ raise RuntimeError("Backup already exists, "
+ "run with overwrite=True")
+
+ query = self._dump_query(self.uri, dump_dir, db_name=db_name)
+ print("Mongodump query:: {}".format(query))
+ subprocess.run(query)
+
+ def _db_exists(self, db_name):
+ return db_name in self.client.list_database_names()
+
+ def _dump_query(self, uri, output_path, db_name=None, collection=None):
+ """Prepares dump query based on 'db_name' or 'collection'."""
+ db_part = coll_part = ""
+ if db_name:
+ db_part = "--db={}".format(db_name)
+ if collection:
+ if not db_name:
+ raise ValueError("db_name must be present")
+ coll_part = "--nsInclude={}.{}".format(db_name, collection)
+ query = "\"{}\" --uri=\"{}\" --out={} {} {}".format(
+ "mongodump", uri, output_path, db_part, coll_part
+ )
+
+ return query
+
+ def _restore_query(self, uri, dump_dir,
+ db_name=None, db_name_out=None,
+ collection=None, drop=True):
+ """Prepares query for mongorestore based on arguments."""
+ db_part = coll_part = drop_part = ""
+ if db_name:
+ db_part = "--nsInclude={}.* --nsFrom={}.*".format(db_name, db_name)
+ if collection:
+ assert db_name, "Must provide db name too"
+ db_part = "--nsInclude={}.{} --nsFrom={}.{}".format(db_name,
+ collection,
+ db_name,
+ collection)
+ if drop:
+ drop_part = "--drop"
+
+ if db_name_out:
+ db_part += " --nsTo={}.*".format(db_name_out)
+
+ query = "\"{}\" --uri=\"{}\" --dir=\"{}\" {} {} {}".format(
+ "mongorestore", uri, dump_dir, db_part, coll_part, drop_part
+ )
+
+ return query
+
+ def _import_query(self, uri, sql_url,
+ db_name=None,
+ collection=None, drop=True, mode=None):
+
+ db_part = coll_part = drop_part = mode_part = ""
+ if db_name:
+ db_part = "--db {}".format(db_name)
+ if collection:
+ assert db_name, "Must provide db name too"
+ coll_part = "--collection {}".format(collection)
+ if drop:
+ drop_part = "--drop"
+ if mode:
+ mode_part = "--mode {}".format(mode)
+
+ query = \
+ "\"{}\" --legacy --uri=\"{}\" --file=\"{}\" {} {} {} {}".format(
+ "mongoimport", uri, sql_url,
+ db_part, coll_part, drop_part, mode_part)
+
+ return query
+
+# Example usage (kept commented out):
+#
+# handler = DBHandler(uri="mongodb://localhost:27017")
+# backup_dir = "c:\\projects\\dumps"
+#
+# handler.backup_to_dump("openpype", backup_dir, True)
+# handler.setup_from_dump("test_db", backup_dir, True)
+# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql",
+#                             collection="test_project",
+#                             drop=False, mode="upsert")
+# handler.setup_from_sql("test_db", "c:\\projects\\sql",
+#                        collection="test_project",
+#                        drop=False, mode="upsert")
diff --git a/tests/lib/file_handler.py b/tests/lib/file_handler.py
new file mode 100644
index 0000000000..ee3abc6ecb
--- /dev/null
+++ b/tests/lib/file_handler.py
@@ -0,0 +1,260 @@
+import enlighten
+import os
+import re
+import urllib
+from urllib.parse import urlparse
+import urllib.request
+import urllib.error
+import itertools
+import hashlib
+import tarfile
+import zipfile
+
+
+USER_AGENT = "openpype"
+
+
+class RemoteFileHandler:
+ """Download file from url, might be GDrive shareable link"""
+
+ IMPLEMENTED_ZIP_FORMATS = ['zip', 'tar', 'tgz',
+ 'tar.gz', 'tar.xz', 'tar.bz2']
+
+ @staticmethod
+ def calculate_md5(fpath, chunk_size=1024 * 1024):
+ md5 = hashlib.md5()
+ with open(fpath, 'rb') as f:
+ for chunk in iter(lambda: f.read(chunk_size), b''):
+ md5.update(chunk)
+ return md5.hexdigest()
+
+ @staticmethod
+ def check_md5(fpath, md5, **kwargs):
+ return md5 == RemoteFileHandler.calculate_md5(fpath, **kwargs)
+
+ @staticmethod
+ def check_integrity(fpath, md5=None):
+ if not os.path.isfile(fpath):
+ return False
+ if md5 is None:
+ return True
+ return RemoteFileHandler.check_md5(fpath, md5)
+
+ @staticmethod
+ def download_url(
+ url, root, filename=None,
+ md5=None, max_redirect_hops=3
+ ):
+ """Download a file from a url and place it in root.
+ Args:
+ url (str): URL to download file from
+ root (str): Directory to place downloaded file in
+ filename (str, optional): Name to save the file under.
+ If None, use the basename of the URL
+ md5 (str, optional): MD5 checksum of the download.
+ If None, do not check
+ max_redirect_hops (int, optional): Maximum number of redirect
+ hops allowed
+ """
+ root = os.path.expanduser(root)
+ if not filename:
+ filename = os.path.basename(url)
+ fpath = os.path.join(root, filename)
+
+ os.makedirs(root, exist_ok=True)
+
+ # check if file is already present locally
+ if RemoteFileHandler.check_integrity(fpath, md5):
+ print('Using downloaded and verified file: ' + fpath)
+ return
+
+ # expand redirect chain if needed
+ url = RemoteFileHandler._get_redirect_url(url,
+ max_hops=max_redirect_hops)
+
+ # check if file is located on Google Drive
+ file_id = RemoteFileHandler._get_google_drive_file_id(url)
+ if file_id is not None:
+ return RemoteFileHandler.download_file_from_google_drive(
+ file_id, root, filename, md5)
+
+ # download the file
+ try:
+ print('Downloading ' + url + ' to ' + fpath)
+ RemoteFileHandler._urlretrieve(url, fpath)
+ except (urllib.error.URLError, IOError) as e:
+ if url[:5] == 'https':
+ url = url.replace('https:', 'http:')
+ print('Failed download. Trying https -> http instead.'
+ ' Downloading ' + url + ' to ' + fpath)
+ RemoteFileHandler._urlretrieve(url, fpath)
+ else:
+ raise e
+
+ # check integrity of downloaded file
+ if not RemoteFileHandler.check_integrity(fpath, md5):
+ raise RuntimeError("File not found or corrupted.")
+
+ @staticmethod
+ def download_file_from_google_drive(file_id, root,
+ filename=None,
+ md5=None):
+ """Download a Google Drive file from and place it in root.
+ Args:
+ file_id (str): id of file to be downloaded
+ root (str): Directory to place downloaded file in
+ filename (str, optional): Name to save the file under.
+ If None, use the id of the file.
+ md5 (str, optional): MD5 checksum of the download.
+ If None, do not check
+ """
+ # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url # noqa
+ import requests
+ url = "https://docs.google.com/uc?export=download"
+
+ root = os.path.expanduser(root)
+ if not filename:
+ filename = file_id
+ fpath = os.path.join(root, filename)
+
+ os.makedirs(root, exist_ok=True)
+
+ if os.path.isfile(fpath) and RemoteFileHandler.check_integrity(fpath,
+ md5):
+ print('Using downloaded and verified file: ' + fpath)
+ else:
+ session = requests.Session()
+
+ response = session.get(url, params={'id': file_id}, stream=True)
+ token = RemoteFileHandler._get_confirm_token(response)
+
+ if token:
+ params = {'id': file_id, 'confirm': token}
+ response = session.get(url, params=params, stream=True)
+
+ response_content_generator = response.iter_content(32768)
+ first_chunk = None
+ while not first_chunk: # filter out keep-alive new chunks
+ first_chunk = next(response_content_generator)
+
+ if RemoteFileHandler._quota_exceeded(first_chunk):
+ msg = (
+ f"The daily quota of the file {filename} is exceeded and "
+ f"it can't be downloaded. This is a limitation of "
+ f"Google Drive and can only be overcome by trying "
+ f"again later."
+ )
+ raise RuntimeError(msg)
+
+ RemoteFileHandler._save_response_content(
+ itertools.chain((first_chunk, ),
+ response_content_generator), fpath)
+ response.close()
+
+ @staticmethod
+ def unzip(path, destination_path=None):
+ if not destination_path:
+ destination_path = os.path.dirname(path)
+
+ _, archive_type = os.path.splitext(path)
+ archive_type = archive_type.lstrip('.')
+
+ if archive_type in ['zip']:
+ print("Unzipping {}->{}".format(path, destination_path))
+ zip_file = zipfile.ZipFile(path)
+ zip_file.extractall(destination_path)
+ zip_file.close()
+
+ elif archive_type in [
+ 'tar', 'tgz', 'tar.gz', 'tar.xz', 'tar.bz2'
+ ]:
+ print("Unzipping {}->{}".format(path, destination_path))
+ if archive_type == 'tar':
+ tar_type = 'r:'
+ elif archive_type.endswith('xz'):
+ tar_type = 'r:xz'
+ elif archive_type.endswith('gz'):
+ tar_type = 'r:gz'
+ elif archive_type.endswith('bz2'):
+ tar_type = 'r:bz2'
+ else:
+ tar_type = 'r:*'
+ try:
+ tar_file = tarfile.open(path, tar_type)
+ except tarfile.ReadError:
+ raise SystemExit("corrupted archive")
+ tar_file.extractall(destination_path)
+ tar_file.close()
+
+ @staticmethod
+ def _urlretrieve(url, filename, chunk_size=1024 * 32):
+ with open(filename, "wb") as fh:
+ with urllib.request.urlopen(
+ urllib.request.Request(url,
+ headers={"User-Agent": USER_AGENT})) \
+ as response:
+ # response.read returns bytes, so compare against b""
+ for chunk in iter(lambda: response.read(chunk_size), b""):
+ if not chunk:
+ break
+ fh.write(chunk)
+
+ @staticmethod
+ def _get_redirect_url(url, max_hops):
+ initial_url = url
+ headers = {"User-Agent": USER_AGENT}
+
+ for _ in range(max_hops + 1):
+ # issue a real HEAD request instead of a bogus "Method" header
+ with urllib.request.urlopen(
+ urllib.request.Request(
+ url, headers=headers, method="HEAD")) as response:
+ if response.url == url or response.url is None:
+ return url
+
+ url = response.url
+ else:
+ raise RecursionError(
+ f"Request to {initial_url} exceeded {max_hops} redirects. "
+ f"The last redirect points to {url}."
+ )
+
+ @staticmethod
+ def _get_confirm_token(response):
+ for key, value in response.cookies.items():
+ if key.startswith('download_warning'):
+ return value
+
+ return None
+
+ @staticmethod
+ def _save_response_content(
+ response_gen, destination,
+ ):
+ with open(destination, "wb") as f:
+ pbar = enlighten.Counter(
+ total=None, desc="Save content", units="%", color="green")
+ progress = 0
+ for chunk in response_gen:
+ if chunk: # filter out keep-alive new chunks
+ f.write(chunk)
+ progress += len(chunk)
+
+ pbar.close()
+
+ @staticmethod
+ def _quota_exceeded(first_chunk):
+ try:
+ return "Google Drive - Quota exceeded" in first_chunk.decode()
+ except UnicodeDecodeError:
+ return False
+
+ @staticmethod
+ def _get_google_drive_file_id(url):
+ parts = urlparse(url)
+
+ if re.match(r"(drive|docs)[.]google[.]com", parts.netloc) is None:
+ return None
+
+ match = re.match(r"/file/d/(?P<id>[^/]*)", parts.path)
+ if match is None:
+ return None
+
+ return match.group("id")
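+
+# Example usage (an illustrative sketch; the file id and paths are placeholders):
+#
+# RemoteFileHandler.download_file_from_google_drive(
+#     "GDRIVE_FILE_ID_PLACEHOLDER", "c:\\projects\\test_data",
+#     filename="test_data.zip")
+# RemoteFileHandler.unzip("c:\\projects\\test_data\\test_data.zip")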
diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py
new file mode 100644
index 0000000000..1832efb7ed
--- /dev/null
+++ b/tests/lib/testing_classes.py
@@ -0,0 +1,263 @@
+"""Testing classes for module testing and publishing in hosts."""
+import os
+import sys
+import six
+import json
+import pytest
+import tempfile
+import shutil
+import glob
+
+from tests.lib.db_handler import DBHandler
+from tests.lib.file_handler import RemoteFileHandler
+
+
+class BaseTest:
+ """Empty base test class"""
+
+
+class ModuleUnitTest(BaseTest):
+ """Generic test class for testing modules
+
+ Use PERSIST==True to keep temporary folder and DB prepared for
+ debugging or preparation of test files.
+
+ Implemented fixtures:
+ monkeypatch_session - fixture for env vars with session scope
+ download_test_data - tmp folder with extracted data from GDrive
+ env_var - sets env vars from input file
+ db_setup - prepares avalon AND openpype DBs for testing from
+ binary dumps from input data
+ dbcon - returns DBConnection to AvalonDB
+ dbcon_openpype - returns DBConnection for OpenpypeMongoDB
+
+ """
+ PERSIST = False # True to not purge temporary folder nor test DB
+
+ TEST_OPENPYPE_MONGO = "mongodb://localhost:27017"
+ TEST_DB_NAME = "test_db"
+ TEST_PROJECT_NAME = "test_project"
+ TEST_OPENPYPE_NAME = "test_openpype"
+
+ TEST_FILES = []
+
+ PROJECT = "test_project"
+ ASSET = "test_asset"
+ TASK = "test_task"
+
+ @pytest.fixture(scope='session')
+ def monkeypatch_session(self):
+ """Monkeypatch cannot be used directly with module or session scoped fixtures."""
+ from _pytest.monkeypatch import MonkeyPatch
+ m = MonkeyPatch()
+ yield m
+ m.undo()
+
+ @pytest.fixture(scope="module")
+ def download_test_data(self):
+ tmpdir = tempfile.mkdtemp()
+ for test_file in self.TEST_FILES:
+ file_id, file_name, md5 = test_file
+
+ f_name, ext = os.path.splitext(file_name)
+
+ RemoteFileHandler.download_file_from_google_drive(file_id,
+ str(tmpdir),
+ file_name)
+
+ if ext.lstrip('.') in RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS:
+ RemoteFileHandler.unzip(os.path.join(tmpdir, file_name))
+ print("Temporary folder created:: {}".format(tmpdir))
+ yield tmpdir
+
+ if not self.PERSIST:
+ print("Removing {}".format(tmpdir))
+ shutil.rmtree(tmpdir)
+
+ @pytest.fixture(scope="module")
+ def env_var(self, monkeypatch_session, download_test_data):
+ """Sets temporary env vars from json file."""
+ env_url = os.path.join(download_test_data, "input",
+ "env_vars", "env_var.json")
+ if not os.path.exists(env_url):
+ raise ValueError("Env variable file {} doesn't exist".
+ format(env_url))
+
+ env_dict = {}
+ try:
+ with open(env_url) as json_file:
+ env_dict = json.load(json_file)
+ except ValueError:
+ print("{} doesn't contain valid JSON".format(env_url))
+ six.reraise(*sys.exc_info())
+
+ for key, value in env_dict.items():
+ all_vars = globals()
+ all_vars.update(vars(ModuleUnitTest)) # TODO check
+ value = value.format(**all_vars)
+ print("Setting {}:{}".format(key, value))
+ monkeypatch_session.setenv(key, str(value))
+ import openpype
+
+ openpype_root = os.path.dirname(os.path.dirname(openpype.__file__))
+ # OpenPype expects both of these variables, point them to the repo root
+ monkeypatch_session.setenv("OPENPYPE_ROOT", openpype_root)
+ monkeypatch_session.setenv("OPENPYPE_REPOS_ROOT", openpype_root)
+
+ @pytest.fixture(scope="module")
+ def db_setup(self, download_test_data, env_var, monkeypatch_session):
+ """Restore prepared MongoDB dumps into selected DB."""
+ backup_dir = os.path.join(download_test_data, "input", "dumps")
+
+ uri = os.environ.get("OPENPYPE_MONGO")
+ db_handler = DBHandler(uri)
+ db_handler.setup_from_dump(self.TEST_DB_NAME, backup_dir, True,
+ db_name_out=self.TEST_DB_NAME)
+
+ db_handler.setup_from_dump("openpype", backup_dir, True,
+ db_name_out=self.TEST_OPENPYPE_NAME)
+
+ yield db_handler
+
+ if not self.PERSIST:
+ db_handler.teardown(self.TEST_DB_NAME)
+ db_handler.teardown(self.TEST_OPENPYPE_NAME)
+
+ @pytest.fixture(scope="module")
+ def dbcon(self, db_setup):
+ """Provide test database connection.
+
+ Database prepared from dumps with 'db_setup' fixture.
+ """
+ from avalon.api import AvalonMongoDB
+ dbcon = AvalonMongoDB()
+ dbcon.Session["AVALON_PROJECT"] = self.TEST_PROJECT_NAME
+ yield dbcon
+
+ @pytest.fixture(scope="module")
+ def dbcon_openpype(self, db_setup):
+ """Provide test database connection for OP settings.
+
+ Database prepared from dumps with 'db_setup' fixture.
+ """
+ from openpype.lib import OpenPypeMongoConnection
+ mongo_client = OpenPypeMongoConnection.get_mongo_client()
+ yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"]
+
+
+class PublishTest(ModuleUnitTest):
+ """Test class for publishing in hosts.
+
+ Implemented fixtures:
+ launched_app - launches APP with last_workfile_path
+ publish_finished - waits until publish is finished, host must
+ kill its process when finished publishing. Includes timeout
+ which raises ValueError
+
+ Not implemented:
+ last_workfile_path - returns path to testing workfile
+ startup_scripts - provide script for setup in host
+
+ Implemented tests:
+ test_folder_structure_same - compares published and expected
+ subfolders if they contain same files. Compares only on file
+ presence
+
+ TODO: implement test on file size, file content
+ """
+
+ APP = ""
+ APP_VARIANT = ""
+
+ APP_NAME = "{}/{}".format(APP, APP_VARIANT)
+
+ TIMEOUT = 120 # publish timeout
+
+ @pytest.fixture(scope="module")
+ def last_workfile_path(self, download_test_data):
+ raise NotImplementedError
+
+ @pytest.fixture(scope="module")
+ def startup_scripts(self, monkeypatch_session, download_test_data):
+ raise NotImplementedError
+
+ @pytest.fixture(scope="module")
+ def launched_app(self, dbcon, download_test_data, last_workfile_path,
+ startup_scripts):
+ """Launch host app"""
+ # set publishing folders
+ root_key = "config.roots.work.{}".format("windows") # TEMP
+ dbcon.update_one(
+ {"type": "project"},
+ {"$set":
+ {
+ root_key: download_test_data
+ }}
+ )
+
+ # set schema - for integrate_new
+ from openpype import PACKAGE_DIR
+ # Path to OpenPype's schema
+ schema_path = os.path.join(
+ os.path.dirname(PACKAGE_DIR),
+ "schema"
+ )
+ os.environ["AVALON_SCHEMA"] = schema_path
+
+ import openpype
+ openpype.install()
+ os.environ["OPENPYPE_EXECUTABLE"] = sys.executable
+ from openpype.lib import ApplicationManager
+
+ application_manager = ApplicationManager()
+ data = {
+ "last_workfile_path": last_workfile_path,
+ "start_last_workfile": True,
+ "project_name": self.PROJECT,
+ "asset_name": self.ASSET,
+ "task_name": self.TASK
+ }
+
+ yield application_manager.launch(self.APP_NAME, **data)
+
+ @pytest.fixture(scope="module")
+ def publish_finished(self, dbcon, launched_app, download_test_data):
+ """Dummy fixture waiting for publish to finish"""
+ import time
+ time_start = time.time()
+ while launched_app.poll() is None:
+ time.sleep(0.5)
+ if time.time() - time_start > self.TIMEOUT:
+ raise ValueError("Timeout reached")
+
+ # some clean exit test possible?
+ print("Publish finished")
+ yield True
+
+ def test_folder_structure_same(self, dbcon, publish_finished,
+ download_test_data):
+ """Check if expected and published subfolders contain same files.
+
+ Compares only presence, not size nor content!
+ """
+ published_dir_base = download_test_data
+ published_dir = os.path.join(published_dir_base,
+ self.PROJECT,
+ self.TASK,
+ "**")
+ expected_dir_base = os.path.join(published_dir_base,
+ "expected")
+ expected_dir = os.path.join(expected_dir_base,
+ self.PROJECT,
+ self.TASK,
+ "**")
+
+ published = set(f.replace(published_dir_base, '') for f in
+ glob.glob(published_dir, recursive=True) if
+ f != published_dir_base and os.path.exists(f))
+ expected = set(f.replace(expected_dir_base, '') for f in
+ glob.glob(expected_dir, recursive=True) if
+ f != expected_dir_base and os.path.exists(f))
+
+ not_matched = expected.difference(published)
+ assert not not_matched, "Missing {} files".format(not_matched)
diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip
new file mode 100644
index 0000000000..0faab86b37
Binary files /dev/null and b/tests/resources/test_data.zip differ
diff --git a/tests/igniter/test_bootstrap_repos.py b/tests/unit/igniter/test_bootstrap_repos.py
similarity index 100%
rename from tests/igniter/test_bootstrap_repos.py
rename to tests/unit/igniter/test_bootstrap_repos.py
diff --git a/tests/igniter/test_tools.py b/tests/unit/igniter/test_tools.py
similarity index 100%
rename from tests/igniter/test_tools.py
rename to tests/unit/igniter/test_tools.py
diff --git a/tests/openpype/lib/test_user_settings.py b/tests/unit/openpype/lib/test_user_settings.py
similarity index 100%
rename from tests/openpype/lib/test_user_settings.py
rename to tests/unit/openpype/lib/test_user_settings.py
diff --git a/tests/unit/openpype/modules/sync_server/test_site_operations.py b/tests/unit/openpype/modules/sync_server/test_site_operations.py
new file mode 100644
index 0000000000..6a861100a4
--- /dev/null
+++ b/tests/unit/openpype/modules/sync_server/test_site_operations.py
@@ -0,0 +1,134 @@
+"""Test file for Sync Server, tests site operations add_site, remove_site.
+
+ File:
+ creates temporary directory and downloads .zip file from GDrive
+ unzips .zip file
+ uses content of .zip file (MongoDB's dumps) to import to new databases
+ with use of 'monkeypatch_session' modifies required env vars
+ temporarily
+ runs battery of tests checking that site operation for Sync Server
+ module are working
+ removes temporary folder
+ removes temporary databases (unless PERSIST is set)
+"""
+import pytest
+
+from tests.lib.testing_classes import ModuleUnitTest
+from bson.objectid import ObjectId
+
+
+class TestSiteOperation(ModuleUnitTest):
+
+ REPRESENTATION_ID = "60e578d0c987036c6a7b741d"
+
+ TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt",
+ "test_site_operations.zip", '')]
+
+ @pytest.fixture(scope="module")
+ def setup_sync_server_module(self, dbcon):
+ """Get sync_server_module from ModulesManager"""
+ from openpype.modules import ModulesManager
+
+ manager = ModulesManager()
+ sync_server = manager.modules_by_name["sync_server"]
+ yield sync_server
+
+ @pytest.mark.usefixtures("dbcon")
+ def test_project_created(self, dbcon):
+ assert ['test_project'] == dbcon.database.collection_names(False)
+
+ @pytest.mark.usefixtures("dbcon")
+ def test_objects_imported(self, dbcon):
+ count_obj = len(list(dbcon.database[self.TEST_PROJECT_NAME].find({})))
+ assert 15 == count_obj
+
+ @pytest.mark.usefixtures("setup_sync_server_module")
+ def test_add_site(self, dbcon, setup_sync_server_module):
+ """Adds 'test_site', checks that it was added and
+ that it is not duplicated."""
+ query = {
+ "_id": ObjectId(self.REPRESENTATION_ID)
+ }
+
+ ret = dbcon.database[self.TEST_PROJECT_NAME].find(query)
+
+ assert 1 == len(list(ret)), \
+ "Single {} must be in DB".format(self.REPRESENTATION_ID)
+
+ setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
+ self.REPRESENTATION_ID,
+ site_name='test_site')
+
+ ret = list(dbcon.database[self.TEST_PROJECT_NAME].find(query))
+
+ assert 1 == len(ret), \
+ "Single {} must be in DB".format(self.REPRESENTATION_ID)
+
+ ret = ret.pop()
+ site_names = [site["name"] for site in ret["files"][0]["sites"]]
+ assert 'test_site' in site_names, "Site name wasn't added"
+
+ @pytest.mark.usefixtures("setup_sync_server_module")
+ def test_add_site_again(self, dbcon, setup_sync_server_module):
+ """Depends on test_add_site, must throw exception."""
+ with pytest.raises(ValueError):
+ setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
+ self.REPRESENTATION_ID,
+ site_name='test_site')
+
+ @pytest.mark.usefixtures("setup_sync_server_module")
+ def test_add_site_again_force(self, dbcon, setup_sync_server_module):
+ """Depends on test_add_site, must not throw exception."""
+ setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
+ self.REPRESENTATION_ID,
+ site_name='test_site', force=True)
+
+ query = {
+ "_id": ObjectId(self.REPRESENTATION_ID)
+ }
+
+ ret = list(dbcon.database[self.TEST_PROJECT_NAME].find(query))
+
+ assert 1 == len(ret), \
+ "Single {} must be in DB".format(self.REPRESENTATION_ID)
+
+ @pytest.mark.usefixtures("setup_sync_server_module")
+ def test_remove_site(self, dbcon, setup_sync_server_module):
+ """Depends on test_add_site, must remove 'test_site'."""
+ setup_sync_server_module.remove_site(self.TEST_PROJECT_NAME,
+ self.REPRESENTATION_ID,
+ site_name='test_site')
+
+ query = {
+ "_id": ObjectId(self.REPRESENTATION_ID)
+ }
+
+ ret = list(dbcon.database[self.TEST_PROJECT_NAME].find(query))
+
+ assert 1 == len(ret), \
+ "Single {} must be in DB".format(self.REPRESENTATION_ID)
+
+ ret = ret.pop()
+ site_names = [site["name"] for site in ret["files"][0]["sites"]]
+
+ assert 'test_site' not in site_names, "Site name wasn't removed"
+
+ @pytest.mark.usefixtures("setup_sync_server_module")
+ def test_remove_site_again(self, dbcon, setup_sync_server_module):
+ """Depends on test_add_site, must throw exception."""
+ with pytest.raises(ValueError):
+ setup_sync_server_module.remove_site(self.TEST_PROJECT_NAME,
+ self.REPRESENTATION_ID,
+ site_name='test_site')
+
+ query = {
+ "_id": ObjectId(self.REPRESENTATION_ID)
+ }
+
+ ret = list(dbcon.database[self.TEST_PROJECT_NAME].find(query))
+
+ assert 1 == len(ret), \
+ "Single {} must be in DB".format(self.REPRESENTATION_ID)
+
+
+test_case = TestSiteOperation()
diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1
index a0832e0135..49fa803742 100644
--- a/tools/build_win_installer.ps1
+++ b/tools/build_win_installer.ps1
@@ -105,6 +105,46 @@ $env:BUILD_VERSION = $openpype_version
iscc
+Write-Host ">>> " -NoNewline -ForegroundColor green
+Write-Host "Detecting host Python ... " -NoNewline
+$python = "python"
+if (Get-Command "pyenv" -ErrorAction SilentlyContinue) {
+ $pyenv_python = & pyenv which python
+ if (Test-Path -PathType Leaf -Path "$($pyenv_python)") {
+ $python = $pyenv_python
+ }
+}
+if (-not (Get-Command $python -ErrorAction SilentlyContinue)) {
+ Write-Host "!!! Python not detected" -ForegroundColor red
+ Set-Location -Path $current_dir
+ Exit-WithCode 1
+}
+$version_command = @'
+import sys
+print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))
+'@
+
+$p = & $python -c $version_command
+$env:PYTHON_VERSION = $p
+$m = $p -match '(\d+)\.(\d+)'
+if(-not $m) {
+ Write-Host "!!! Cannot determine version" -ForegroundColor red
+ Set-Location -Path $current_dir
+ Exit-WithCode 1
+}
+# We are supporting python 3.7 only
+if (($matches[1] -lt 3) -or ($matches[2] -lt 7)) {
+ Write-Host "FAILED Version [ $p ] is old and unsupported" -ForegroundColor red
+ Set-Location -Path $current_dir
+ Exit-WithCode 1
+} elseif (($matches[1] -eq 3) -and ($matches[2] -gt 7)) {
+ Write-Host "WARNING Version [ $p ] is unsupported, use at your own risk." -ForegroundColor yellow
+ Write-Host "*** " -NoNewline -ForegroundColor yellow
+ Write-Host "OpenPype supports only Python 3.7" -ForegroundColor white
+} else {
+ Write-Host "OK [ $p ]" -ForegroundColor green
+}
+
Write-Host ">>> " -NoNewline -ForegroundColor green
Write-Host "Creating OpenPype installer ... " -ForegroundColor white
@@ -114,7 +154,7 @@ from distutils.util import get_platform
print('exe.{}-{}'.format(get_platform(), sys.version[0:3]))
"@
-$build_dir = & python -c $build_dir_command
+$build_dir = & $python -c $build_dir_command
Write-Host "Build directory ... ${build_dir}" -ForegroundColor white
$env:BUILD_DIR = $build_dir
diff --git a/tools/ci_tools.py b/tools/ci_tools.py
index 3c1aaae991..337b19a346 100644
--- a/tools/ci_tools.py
+++ b/tools/ci_tools.py
@@ -3,7 +3,34 @@ import sys
from semver import VersionInfo
from git import Repo
from optparse import OptionParser
+from github import Github
+import os
+def get_release_type_github(Log, github_token):
+ # print(Log)
+ minor_labels = ["type: feature", "type: deprecated"]
+ patch_labels = ["type: enhancement", "type: bug"]
+
+ g = Github(github_token)
+ repo = g.get_repo("pypeclub/OpenPype")
+
+ labels = set()
+ for line in Log.splitlines():
+ match = re.search(r"pull request #(\d+)", line)
+ if match:
+ pr_number = match.group(1)
+ pr = repo.get_pull(int(pr_number))
+ for label in pr.labels:
+ labels.add(label.name)
+
+ if any(label in labels for label in minor_labels):
+ return "minor"
+
+ if any(label in labels for label in patch_labels):
+ return "patch"
+
+ return None
+
def remove_prefix(text, prefix):
return text[text.startswith(prefix) and len(prefix):]
@@ -36,7 +63,7 @@ def get_log_since_tag(version):
def release_type(log):
regex_minor = ["feature/", "(feat)"]
- regex_patch = ["bugfix/", "fix/", "(fix)", "enhancement/"]
+ regex_patch = ["bugfix/", "fix/", "(fix)", "enhancement/", "update"]
for reg in regex_minor:
if re.search(reg, log):
return "minor"
@@ -69,7 +96,7 @@ def bump_file_versions(version):
file_regex_replace(filename, regex, pyproject_version)
-def calculate_next_nightly(token="nightly"):
+def calculate_next_nightly(type="nightly", github_token=None):
last_prerelease, last_pre_tag = get_last_version("CI")
last_pre_v = VersionInfo.parse(last_prerelease)
last_pre_v_finalized = last_pre_v.finalize_version()
@@ -78,7 +105,10 @@ def calculate_next_nightly(token="nightly"):
last_release, last_release_tag = get_last_version("release")
last_release_v = VersionInfo.parse(last_release)
- bump_type = release_type(get_log_since_tag(last_release))
+ bump_type = get_release_type_github(
+ get_log_since_tag(last_release_tag),
+ github_token
+ )
if not bump_type:
return None
@@ -86,10 +116,10 @@ def calculate_next_nightly(token="nightly"):
# print(next_release_v)
if next_release_v > last_pre_v_finalized:
- next_tag = next_release_v.bump_prerelease(token=token).__str__()
+ next_tag = next_release_v.bump_prerelease(token=type).__str__()
return next_tag
elif next_release_v == last_pre_v_finalized:
- next_tag = last_pre_v.bump_prerelease(token=token).__str__()
+ next_tag = last_pre_v.bump_prerelease(token=type).__str__()
return next_tag
def finalize_latest_nightly():
@@ -125,30 +155,36 @@ def main():
help="finalize latest prerelease to a release")
parser.add_option("-p", "--prerelease",
dest="prerelease", action="store",
- help="define prerelease token")
+ help="define prerelease type")
parser.add_option("-f", "--finalize",
dest="finalize", action="store",
- help="define prerelease token")
+ help="define prerelease type")
parser.add_option("-v", "--version",
dest="version", action="store",
help="work with explicit version")
parser.add_option("-l", "--lastversion",
dest="lastversion", action="store",
help="work with explicit version")
+ parser.add_option("-g", "--github_token",
+ dest="github_token", action="store",
+ help="github token")
(options, args) = parser.parse_args()
if options.bump:
- last_CI, last_CI_tag = get_last_version("CI")
last_release, last_release_tag = get_last_version("release")
- bump_type_CI = release_type(get_log_since_tag(last_CI_tag))
- bump_type_release = release_type(get_log_since_tag(last_release_tag))
- if bump_type_CI is None or bump_type_release is None:
+ bump_type_release = get_release_type_github(
+ get_log_since_tag(last_release_tag),
+ options.github_token
+ )
+ if bump_type_release is None:
print("skip")
+ else:
+ print(bump_type_release)
if options.nightly:
- next_tag_v = calculate_next_nightly()
+ next_tag_v = calculate_next_nightly(github_token=options.github_token)
print(next_tag_v)
bump_file_versions(next_tag_v)
diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md
index d6ccc883b0..7a46ee7906 100644
--- a/website/docs/admin_openpype_commands.md
+++ b/website/docs/admin_openpype_commands.md
@@ -55,7 +55,7 @@ openpype_console tray --debug
---
### `launch` arguments {#eventserver-arguments}
You have to set either proper environment variables to provide URL and credentials or use
-option to specify them. If you use `--store_credentials` provided credentials will be stored for later use.
+options to specify them.
| Argument | Description |
| --- | --- |
@@ -63,16 +63,13 @@ option to specify them. If you use `--store_credentials` provided credentials wi
| `--ftrack-url` | URL to ftrack server (can be set with `FTRACK_SERVER`) |
| `--ftrack-user` |user name to log in to ftrack (can be set with `FTRACK_API_USER`) |
| `--ftrack-api-key` | ftrack api key (can be set with `FTRACK_API_KEY`) |
-| `--ftrack-events-path` | path to event server plugins (can be set with `FTRACK_EVENTS_PATH`) |
-| `--no-stored-credentials` | will use credential specified with options above |
-| `--store-credentials` | will store credentials to file for later use |
| `--legacy` | run event server without mongo storing |
| `--clockify-api-key` | Clockify API key (can be set with `CLOCKIFY_API_KEY`) |
| `--clockify-workspace` | Clockify workspace (can be set with `CLOCKIFY_WORKSPACE`) |
To run ftrack event server:
```shell
-openpype_console eventserver --ftrack-url= --ftrack-user= --ftrack-api-key= --ftrack-events-path= --no-stored-credentials --store-credentials
+openpype_console eventserver --ftrack-url= --ftrack-user= --ftrack-api-key=
```
---
diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md
index 80154356af..6057ed0830 100644
--- a/website/docs/admin_settings_system.md
+++ b/website/docs/admin_settings_system.md
@@ -21,6 +21,9 @@ as a naive barier to prevent artists from accidental setting changes.
**`Environment`** - Globally applied environment variables that will be appended to any OpenPype process in the studio.
+**`Disk mapping`** - Platform dependent configuration for mapping virtual disk(s) on an artist's machine before OpenPype starts up.
+Uses the `subst` command; if the volume letter configured in the `Destination` field already exists, no re-mapping is done for that letter (volume).
+
**`Versions Repository`** - Location where automatic update mechanism searches for zip files with
OpenPype update packages. To read more about preparing OpenPype for automatic updates go to [Admin Distribute docs](admin_distribute#2-openpype-codebase)
diff --git a/website/docs/artist_hosts_nuke_tut.md b/website/docs/artist_hosts_nuke_tut.md
new file mode 100644
index 0000000000..4d116bd958
--- /dev/null
+++ b/website/docs/artist_hosts_nuke_tut.md
@@ -0,0 +1,337 @@
+---
+id: artist_hosts_nuke_tut
+title: Nuke
+sidebar_label: Nuke
+---
+
+:::note
+OpenPype supports Nuke version **`11.0`** and above.
+:::
+
+## OpenPype global tools
+
+- [Set Context](artist_tools.md#set-context)
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Manage (Inventory)](artist_tools.md#inventory)
+- [Publish](artist_tools.md#publisher)
+- [Library Loader](artist_tools.md#library-loader)
+
+## Nuke specific tools
+
+
+
+
+### Set Frame Ranges
+
+Use this feature in case you are not sure the frame range is correct.
+
+##### Result
+
+- setting Frame Range in script settings
+- setting Frame Range in viewers (timeline)
+
+
+
+
+
+
+
+
+
+
+1. limiting to Frame Range without handles
+2. **Input** handle on start
+3. **Output** handle on end
+
+
+
+
+### Set Resolution
+
+
+
+
+
+This menu item will set the correct resolution format for you, as defined by your production.
+
+##### Result
+
+- creates new item in formats with project name
+- sets the new format as used
+
+
+
+This menu item will set the correct Colorspace definitions for you. Everything has to be configured by your production (project coordinator).
+
+##### Result
+
+- set Colorspace in your script settings
+- set preview LUT to your viewers
+- set correct colorspace to all discovered Read nodes (following expression set in settings)
+
+
+
+It is usually enough to use this option once in a while, just to make sure the workfile has the correct properties set.
+
+##### Result
+
+- set Frame Ranges
+- set Colorspace
+- set Resolution
+
+
+
+
+
+
+
+
+
+### Build Workfile
+
+
+
+
+This tool will append all available subsets into the current node graph. It will look into the database and get the latest [versions](artist_concepts.md#version) of all available [subsets](artist_concepts.md#subset).
+
+
+##### Result
+
+- adds the latest versions of all subsets (rendered image sequences) as read nodes
+- ~~adds publishable write node as `renderMain` subset~~
+
+
+
+
+
+
+
+
+
+## Nuke QuickStart
+
+This QuickStart is a short introduction to what OpenPype can do for you. It provides an overview for compositing artists and simplifies processes that are described in more detail in other parts of the documentation.
+
+### Launch Nuke - Shot and Task Context
+OpenPype has to know what shot and task you are working on. You need to run Nuke in the context of the task, using the Ftrack Action or the OpenPype Launcher to select the task and run Nuke.
+
+
+
+
+:::tip Admin Tip - Nuke version
+You can [configure](admin_settings_project_anatomy.md#Attributes) which DCC version(s) will be available for the current project in **Studio Settings → Project → Anatomy → Attributes → Applications**
+:::
+
+### Nuke Initial setup
+The OpenPype menu in Nuke shows the current context.
+
+
+
+Launching Nuke with a context stops your current timer and starts the clock on the shot and task you picked.
+
+OpenPype performs the initial setup of your Nuke script. It is the same as running [Apply All Settings](artist_hosts_nuke.md#apply-all-settings) from the OpenPype menu.
+
+- Reads the frame range and resolution from the Avalon database and sets them in the Nuke Project Settings.
+Creates a Viewer node, sets its range and indicates handles by In and Out points.
+
+- Reads the Color settings from the project configuration and sets them in the Nuke Project Settings and the Viewer.
+
+- Sets the project directory in the Nuke Project Settings to the Nuke script directory.
+
+:::tip Tip - Project Settings
+After Nuke starts, it will automatically **Apply All Settings** for you. If you think the settings are wrong, contact your supervisor, who can set them correctly for you in the project database.
+:::
+
+### Save Nuke script - the Work File
+Use the OpenPype - Work Files menu to create a new Nuke script. OpenPype offers you preconfigured naming.
+
+
+The Next Available Version checks the work folder for already used versions and offers the lowest unused version number automatically.
+
+Subversion can be used to distinguish or name versions, for example to add a shortened artist name.
+
+More about [workfiles](artist_tools#workfiles).
+
+
+:::tip Admin Tips
+- **Workfile Naming**
+
+ - The [workfile naming](admin_settings_project_anatomy#templates) is configured in anatomy, see **Studio Settings → Project → Anatomy → Templates → Work**
+
+- **Open Workfile**
+
+ - You can [configure](project_settings/settings_project_nuke#create-first-workfile) Nuke to automatically open the last version, or create a file on startup. See **Studio Settings → Project → Global → Tools → Workfiles**
+
+- **Nuke Color Settings**
+
+ - [Color setting](project_settings/settings_project_nuke) for Nuke can be found in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke**
+:::
+
+### Load plate
+Use Load from OpenPype menu to load any plates or renders available.
+
+
+
+Pick the plate asset, right click and choose Load Image Sequence to create a Read node in Nuke.
+
+Note that the Read node created by OpenPype is green. The green color indicates that the highest version of the asset is loaded. Asset versions can be easily changed with [Manage](#managing-versions). Lower versions are highlighted in orange on the Read node.
+
+
+
+More about [Asset loader](artist_tools#loader).
+
+### Create Write Node
+To create an OpenPype-managed Write node, select the Read node you just created and pick Create from the OpenPype menu.
+In the Instance Creator, pick Create Write Render and press Create.
+
+
+
+This will create a Group with a Write node inside.
+
+
+
+:::tip Admin Tip - Configuring write node
+You can configure write node parameters in **Studio Settings → Project → Anatomy → Color Management and Output Formats → Nuke → Nodes**
+:::
+
+#### What Nuke Publish Does
+From the artist's perspective, Nuke publish gathers everything found in the Nuke script with the Publish checkbox turned on, exports it and raises the Nuke script (workfile) version.
+
+The Pyblish dialog shows the progress of the process.
+
+The left column of the dialog shows what will be published. Typically it is one or more renders or prerenders, plus work file.
+
+
+
+The right column shows the publish steps.
+
+##### Publish steps
+1. Gathers everything found in the Nuke script with the Publish checkbox turned on
+2. Collects all the info (from the script, the database, ...)
+3. Validates components to be published (checks render range and resolution...)
+4. Extracts data from the script
+ - generates thumbnail
+ - creates review(s) like h264
+ - adds burnins to review(s)
+ - copies and renames components like render(s), review(s), Nuke script... to the publish folder
+5. Integrates components (writes to the database, sends a preview of the render to Ftrack, ...)
+6. Increments Nuke script version, cleans up the render directory
+
+Gathering all the info and validating usually takes just a few seconds. Creating reviews for long, high-resolution shots can, however, take a significant amount of time when publishing locally.
+
+##### Pyblish Note and Intent
+
+
+Artists can add a Note and Intent before firing the publish button. The Note and Intent are meant for easy communication between the artist and the supervisor. After publishing, the Note and Intent can be seen in Ftrack notes.
+
+##### Pyblish Checkbox
+
+
+
+The Pyblish dialog tries to pack a lot of info into a small area. One of the trickier parts is that it uses non-standard checkboxes. Some squares can be turned on and off by the artist, some are mandatory.
+
+If you run the publish and decide not to publish the Nuke script, you can turn it off right in the Pyblish dialog by clicking on the checkbox. If you decide to render and publish the shot in a lower resolution to speed up the turnaround, you have to turn off the Write Resolution validator. If you want to use an older version of an asset (an older version of the plate...), you have to turn off the Validate Containers validator, and so on.
+
+More info about [Using Pyblish](artist_tools#publisher)
+
+:::tip Admin Tip - Configuring validators
+You can configure Nuke validators like Output Resolution in **Studio Settings → Project → Nuke → Publish plugins**
+:::
+
+### Review
+
+
+When you turn the review checkbox on in your OpenPype write node, here is what happens:
+- OpenPype uses the current Nuke script to
+ - Load the render
+ - Optionally apply LUT
+ - Render ProRes 4444 with the same resolution as your render
+- Use FFmpeg to convert the ProRes to whatever review(s) you defined
+- Use FFmpeg to add (optional) burnin to the review(s) from the previous step
+
+Creating reviews is a part of the publishing process. If you choose to do a local publish or to use existing frames, the review will also be processed on the artist's machine.
+If you choose to publish on the farm, you will render and create the reviews on the farm.
+
+So far there is no option for using existing frames (from your local / check render) and doing just the review on the farm.
+
+More info about [configuring reviews](pype2/admin_presets_plugins#extractreview).
+
+:::tip Admin Tip - Configuring Reviews
+You can configure reviews in **Studio Settings → Project → Global → Publish plugins → ExtractReview / ExtractBurnin**
+Reviews can be configured separately for each host, task, or family. For example, Maya can produce a different review than Nuke, an animation task can have a different burnin than modelling, and a plate can have a different review than a model.
+:::
+
+### Render and Publish
+
+
+
+Let's say you want to render and publish the shot right now, with only a Read and a Write node. You need to decide whether you want to render, check the render and then publish it, or execute the render and publish in one go.
+
+If you wish to check your render before publishing, you can use your local machine or your farm to render the write node as you would without OpenPype, load and check your render (the OpenPype Write node has a convenience button for that) and, if happy, publish with the Use existing frames option selected in the write node to generate the review on your local machine.
+
+If you want to render and publish on the farm in one go, run publish with the On farm option selected in the write node to render and create the review on the farm.
+
+
+
+### Version-less Render
+
+
+
+OpenPype is configured so your render file names have no version number until the render is fully finished and published. The main advantage is that you can keep the render from the previous version and re-render only part of the shot. With care, this is handy.
+
+The main disadvantage of this approach is that you can render only one version of your shot at a time. Otherwise you risk partially overwriting your shot render before publishing copies and renames the rendered files to the properly versioned publish folder.
+
+When making quick farm publishes, like making two versions with different color correction, care must be taken to let the first job (first version) completely finish before the second version starts rendering.
+
+### Managing Versions
+
+
+
+OpenPype checks all the assets loaded into Nuke when the script opens. All out-of-date assets are colored orange, up-to-date assets are colored green.
+
+Use Manage to switch versions for loaded assets.
+
+## Troubleshooting
+
+### Fixing Validate Containers
+
+
+
+If your Pyblish dialog fails on Validate Containers, you might have an old asset loaded. Use OpenPype - Manage... to switch the asset(s) to the latest version.
+
+### Fixing Validate Version
+If your Pyblish dialog fails on Validate Version, you might be trying to publish an already published version. Raise your version in the OpenPype Work Files Save As.
+
+Or maybe you accidentally copied a write node from a different shot into your current one. Check the write publishes on the left side of the Pyblish dialog. Typically you publish only one write. Locate and delete the stray write from the other shot.
\ No newline at end of file
diff --git a/website/docs/assets/nuke_tut/nuke_AnatomyAppsVersions.png b/website/docs/assets/nuke_tut/nuke_AnatomyAppsVersions.png
new file mode 100644
index 0000000000..92e1b4dad7
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_AnatomyAppsVersions.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_AssetLoadOutOfDate.png b/website/docs/assets/nuke_tut/nuke_AssetLoadOutOfDate.png
new file mode 100644
index 0000000000..f7f807a94f
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_AssetLoadOutOfDate.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_AssetLoader.png b/website/docs/assets/nuke_tut/nuke_AssetLoader.png
new file mode 100644
index 0000000000..e52abdc428
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_AssetLoader.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_Context.png b/website/docs/assets/nuke_tut/nuke_Context.png
new file mode 100644
index 0000000000..65bb288764
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_Context.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_Create.png b/website/docs/assets/nuke_tut/nuke_Create.png
new file mode 100644
index 0000000000..2c843c05df
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_Create.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_Creator.png b/website/docs/assets/nuke_tut/nuke_Creator.png
new file mode 100644
index 0000000000..454777574a
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_Creator.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_Load.png b/website/docs/assets/nuke_tut/nuke_Load.png
new file mode 100644
index 0000000000..2a345dc69f
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_Load.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_ManageVersion.png b/website/docs/assets/nuke_tut/nuke_ManageVersion.png
new file mode 100644
index 0000000000..c9f2091347
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_ManageVersion.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_NukeColor.png b/website/docs/assets/nuke_tut/nuke_NukeColor.png
new file mode 100644
index 0000000000..5c4f9a15e0
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_NukeColor.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_Publish.png b/website/docs/assets/nuke_tut/nuke_Publish.png
new file mode 100644
index 0000000000..b53b6cc06c
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_Publish.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_PyblishCheckBox.png b/website/docs/assets/nuke_tut/nuke_PyblishCheckBox.png
new file mode 100644
index 0000000000..2c5d59c9d5
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_PyblishCheckBox.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_PyblishDialogNuke.png b/website/docs/assets/nuke_tut/nuke_PyblishDialogNuke.png
new file mode 100644
index 0000000000..e98a4b9553
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_PyblishDialogNuke.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_PyblishDialogNukeNoteIntent.png b/website/docs/assets/nuke_tut/nuke_PyblishDialogNukeNoteIntent.png
new file mode 100644
index 0000000000..3519ecc22d
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_PyblishDialogNukeNoteIntent.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_RenderLocalFarm.png b/website/docs/assets/nuke_tut/nuke_RenderLocalFarm.png
new file mode 100644
index 0000000000..4c4c8977a0
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_RenderLocalFarm.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction.png b/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction.png
new file mode 100644
index 0000000000..75faaec572
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction_p3.png b/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction_p3.png
new file mode 100644
index 0000000000..27fec32ae4
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_RunNukeFtrackAction_p3.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_RunNukeLauncher.png b/website/docs/assets/nuke_tut/nuke_RunNukeLauncher.png
new file mode 100644
index 0000000000..a42ee6d7b9
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_RunNukeLauncher.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_RunNukeLauncher_p2.png b/website/docs/assets/nuke_tut/nuke_RunNukeLauncher_p2.png
new file mode 100644
index 0000000000..2a36cad380
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_RunNukeLauncher_p2.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_ValidateContainers.png b/website/docs/assets/nuke_tut/nuke_ValidateContainers.png
new file mode 100644
index 0000000000..78e0f2edd7
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_ValidateContainers.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WorkFileNamingAnatomy.png b/website/docs/assets/nuke_tut/nuke_WorkFileNamingAnatomy.png
new file mode 100644
index 0000000000..115a321285
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WorkFileNamingAnatomy.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WorkFileSaveAs.png b/website/docs/assets/nuke_tut/nuke_WorkFileSaveAs.png
new file mode 100644
index 0000000000..661f44632a
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WorkFileSaveAs.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WorkfileOnStartup.png b/website/docs/assets/nuke_tut/nuke_WorkfileOnStartup.png
new file mode 100644
index 0000000000..450589ee3a
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WorkfileOnStartup.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WriteNode.png b/website/docs/assets/nuke_tut/nuke_WriteNode.png
new file mode 100644
index 0000000000..5ce3e81aab
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WriteNode.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WriteNodeCreated.png b/website/docs/assets/nuke_tut/nuke_WriteNodeCreated.png
new file mode 100644
index 0000000000..b283593d6a
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WriteNodeCreated.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WriteNodeReview.png b/website/docs/assets/nuke_tut/nuke_WriteNodeReview.png
new file mode 100644
index 0000000000..68651cdd6c
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WriteNodeReview.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_WriteSettings.png b/website/docs/assets/nuke_tut/nuke_WriteSettings.png
new file mode 100644
index 0000000000..cf00adbee6
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_WriteSettings.png differ
diff --git a/website/docs/assets/nuke_tut/nuke_versionless.png b/website/docs/assets/nuke_tut/nuke_versionless.png
new file mode 100644
index 0000000000..fbb98c55e2
Binary files /dev/null and b/website/docs/assets/nuke_tut/nuke_versionless.png differ
diff --git a/website/docs/assets/settings/settings_system_general.png b/website/docs/assets/settings/settings_system_general.png
index 4c1452d0d4..d04586205d 100644
Binary files a/website/docs/assets/settings/settings_system_general.png and b/website/docs/assets/settings/settings_system_general.png differ
diff --git a/website/docs/assets/site_sync_project_sftp_settings.png b/website/docs/assets/site_sync_project_sftp_settings.png
new file mode 100644
index 0000000000..79267ba1d7
Binary files /dev/null and b/website/docs/assets/site_sync_project_sftp_settings.png differ
diff --git a/website/docs/assets/site_sync_sftp_project_setting_not_forced.png b/website/docs/assets/site_sync_sftp_project_setting_not_forced.png
new file mode 100644
index 0000000000..7bd735b46c
Binary files /dev/null and b/website/docs/assets/site_sync_sftp_project_setting_not_forced.png differ
diff --git a/website/docs/assets/site_sync_sftp_settings_local.png b/website/docs/assets/site_sync_sftp_settings_local.png
new file mode 100644
index 0000000000..45125da1a8
Binary files /dev/null and b/website/docs/assets/site_sync_sftp_settings_local.png differ
diff --git a/website/docs/assets/site_sync_sftp_system.png b/website/docs/assets/site_sync_sftp_system.png
new file mode 100644
index 0000000000..6e8e125f95
Binary files /dev/null and b/website/docs/assets/site_sync_sftp_system.png differ
diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md
index 005270b3b9..8e3806828d 100644
--- a/website/docs/module_ftrack.md
+++ b/website/docs/module_ftrack.md
@@ -51,10 +51,7 @@ There are specific launch arguments for event server. With `openpype_console eve
- **`--ftrack-user "your.username"`** : Ftrack Username
- **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key
-- **`--store-crededentials`** : Entered credentials will be stored for next launch with this argument _(It is not needed to enter **ftrackuser** and **ftrackapikey** args on next launch)_
-- **`--no-stored-credentials`** : Stored credentials are loaded first so if you want to change credentials use this argument
- `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter if you have set `FTRACK_SERVER` in OpenPype' environments)_
-- `--ftrack-events-path "//Paths/To/Events/"` : Paths to events folder. May contain multiple paths separated by `;`. _(it is not needed to enter if you have set `FTRACK_EVENTS_PATH` in OpenPype' environments)_
So if you want to use OpenPype's environments then you can launch event server for first time with these arguments `openpype_console.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. Since that time, if everything was entered correctly, you can launch event server with `openpype_console.exe eventserver`.
@@ -64,8 +61,6 @@ So if you want to use OpenPype's environments then you can launch event server f
- `FTRACK_API_USER` - Username _("your.username")_
- `FTRACK_API_KEY` - User's API key _("00000aaa-11bb-22cc-33dd-444444eeeee")_
- `FTRACK_SERVER` - Ftrack server url _(")_
-- `FTRACK_EVENTS_PATH` - Paths to events _("//Paths/To/Events/")_
- We do not recommend you this way.
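For the environment-variable route listed above, a minimal sketch of the shell setup; the values are the same placeholders used in this documentation and must be replaced with real credentials:

```sh
# Placeholder values only; substitute your own Ftrack credentials and server URL.
export FTRACK_API_USER="my.username"
export FTRACK_API_KEY="00000aaa-11bb-22cc-33dd-444444eeeee"
export FTRACK_SERVER="https://yourdomain.ftrackapp.com/"

openpype_console eventserver
```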
@@ -103,10 +98,12 @@ Event server should **not** run more than once! It may cause major issues.
`sudo vi /opt/openpype/run_event_server.sh`
- add content to the file:
```sh
-#!/usr/bin/env
-export OPENPYPE_DEBUG=3
-pushd /mnt/pipeline/prod/openpype-setup
-. openpype_console eventserver --ftrack-user --ftrack-api-key
+#!/usr/bin/env bash
+export OPENPYPE_DEBUG=1
+export OPENPYPE_MONGO=
+
+pushd /mnt/path/to/openpype
+./openpype_console eventserver --ftrack-user --ftrack-api-key
```
- change file permission:
`sudo chmod 0755 /opt/openpype/run_event_server.sh`
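With the wrapper script in place and a systemd unit pointing at it (as this guide sets up), a minimal sketch of managing the event server through systemd; the unit name `openpype-eventserver.service` is a hypothetical placeholder and must match your actual unit file:

```sh
# Unit name is hypothetical; use the name of your actual .service file.
sudo systemctl daemon-reload
sudo systemctl enable openpype-eventserver.service
sudo systemctl start openpype-eventserver.service
sudo systemctl status openpype-eventserver.service
```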
@@ -146,9 +143,11 @@ WantedBy=multi-user.target
- add content to the service file:
```sh
@echo off
-set OPENPYPE_DEBUG=3
-pushd \\path\to\file\
-openpype_console.exe eventserver --ftrack-user --ftrack-api-key
+set OPENPYPE_DEBUG=1
+set OPENPYPE_MONGO=
+
+pushd \\path\to\openpype
+openpype_console.exe eventserver --ftrack-user --ftrack-api-key
```
- download and install `nssm.cc`
- create Windows service according to nssm.cc manual
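As a rough illustration of that nssm step, a minimal sketch of registering the batch file as a service from an elevated command prompt; the service name and the path to the .bat file are placeholders, and the nssm.cc manual remains the authoritative reference:

```sh
nssm install OpenPypeEventServer "C:\path\to\openpype\run_event_server.bat"
nssm start OpenPypeEventServer
nssm status OpenPypeEventServer
```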
diff --git a/website/docs/module_site_sync.md b/website/docs/module_site_sync.md
index 6ee6660048..b0604ed3cf 100644
--- a/website/docs/module_site_sync.md
+++ b/website/docs/module_site_sync.md
@@ -106,6 +106,36 @@ To get working connection to Google Drive there are some necessary steps:
- add new site back in OpenPype Settings, name as you want, provider needs to be 'gdrive'
- distribute credentials file via shared mounted disk location
+### SFTP
+
+SFTP provider is used to connect to an SFTP server. Currently, authentication with `user:password` or `user:ssh key` is implemented.
+Please provide only one of the two combinations, and don't forget to provide the password for the SSH key if the key was created with a passphrase.
+
+(SFTP connections can be a bit finicky; use FileZilla or WinSCP to test the connection first, it will be much faster.)
+
+Beware that the SSH key must be in OpenSSH format (`.pem`), not PuTTY format (`.ppk`)!
+
+#### How to set up an SFTP site
+
+- Enable the Site Sync module in Settings
+- Add a site with the SFTP provider
+
+
+
+- In Project Settings enable Site Sync (either on the default project, so all projects will be synced, or only on a specific project)
+- Configure the SFTP connection and the destination folder on the SFTP server (`/upload` in the screenshot)
+
+
+
+- If you want to force syncing between the local and the SFTP site for all users, use the combination `active site: local`, `remote site: NAME_OF_SFTP_SITE`
+- If you want to allow only specific users to use SFTP syncing (for example external users who are not located in the office), use `active site: studio`, `remote site: studio`.
+
+
+
+- Each artist can then decide and configure syncing from their local site to the SFTP site via `Local Settings`
+
+
+
### Custom providers
If a studio needs to use other services for cloud storage, or want to implement totally different storage providers, they can do so by writing their own provider plugin. We're working on a developer documentation, however, for now we recommend looking at `abstract_provider.py`and `gdrive.py` inside `openpype/modules/sync_server/providers` and using it as a template.
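Before wiring SFTP credentials into the settings above, it can help to verify them with the standard OpenSSH `sftp` client; a minimal sketch, where the host, user, and key path are placeholders and `/upload` is the destination folder from the example:

```sh
# Key-based login; the key must be in OpenSSH format, not PuTTY's .ppk.
sftp -i ~/.ssh/openpype_sftp_key sftpuser@sftp.example.com

# Or password-based login (you will be prompted for the password):
sftp sftpuser@sftp.example.com

# Inside the session, check that the destination folder is reachable:
#   cd /upload
#   ls
```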
diff --git a/website/docs/pype2/admin_presets_plugins.md b/website/docs/pype2/admin_presets_plugins.md
index 797995d2b7..9c838d4a64 100644
--- a/website/docs/pype2/admin_presets_plugins.md
+++ b/website/docs/pype2/admin_presets_plugins.md
@@ -469,6 +469,7 @@ maya outliner colours for various families
"camera": [0.447, 0.312, 1.0],
"fbx": [1.0, 0.931, 0.312],
"mayaAscii": [0.312, 1.0, 0.747],
+ "mayaScene": [0.312, 1.0, 0.747],
"setdress": [0.312, 1.0, 0.747],
"layout": [0.312, 1.0, 0.747],
"vdbcache": [0.312, 1.0, 0.428],
diff --git a/website/sidebars.js b/website/sidebars.js
index 3a4b933b9a..38e4206b84 100644
--- a/website/sidebars.js
+++ b/website/sidebars.js
@@ -18,7 +18,7 @@ module.exports = {
label: "Integrations",
items: [
"artist_hosts_hiero",
- "artist_hosts_nuke",
+ "artist_hosts_nuke_tut",
"artist_hosts_maya",
"artist_hosts_blender",
"artist_hosts_harmony",
diff --git a/website/yarn.lock b/website/yarn.lock
index b4c12edeb6..066d156d97 100644
--- a/website/yarn.lock
+++ b/website/yarn.lock
@@ -6594,9 +6594,9 @@ prism-react-renderer@^1.1.1:
integrity sha512-GHqzxLYImx1iKN1jJURcuRoA/0ygCcNhfGw1IT8nPIMzarmKQ3Nc+JcG0gi8JXQzuh0C5ShE4npMIoqNin40hg==
prismjs@^1.23.0:
- version "1.24.0"
- resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.0.tgz#0409c30068a6c52c89ef7f1089b3ca4de56be2ac"
- integrity sha512-SqV5GRsNqnzCL8k5dfAjCNhUrF3pR0A9lTDSCUZeh/LIshheXJEaP0hwLz2t4XHivd2J/v2HR+gRnigzeKe3cQ==
+ version "1.25.0"
+ resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.25.0.tgz#6f822df1bdad965734b310b315a23315cf999756"
+ integrity sha512-WCjJHl1KEWbnkQom1+SzftbtXMKQoezOCYs5rECqMN+jP+apI7ftoflyqigqzopSO3hMhTEb0mFClA8lkolgEg==
process-nextick-args@~2.0.0:
version "2.0.1"