Merge branch 'develop' into feature/item_types_value_type_validation

iLLiCiTiT 2020-10-02 16:06:20 +02:00
commit a5a1a51993
196 changed files with 2612 additions and 461 deletions

View file

@ -1,6 +1,7 @@
from .settings import (
system_settings,
project_settings
project_settings,
environments
)
from pypeapp import (
Logger,
@ -55,6 +56,7 @@ from .lib import _subprocess as subprocess
__all__ = [
"system_settings",
"project_settings",
"environments",
"Logger",
"Anatomy",

View file

@ -1,5 +1,6 @@
import os
import sys
from uuid import uuid4
from avalon import api, io, harmony
from avalon.vendor import Qt
@ -8,8 +9,11 @@ import pyblish.api
from pype import lib
signature = str(uuid4())
def set_scene_settings(settings):
func = """function func(args)
func = """function %s_func(args)
{
if (args[0]["fps"])
{
@ -36,8 +40,8 @@ def set_scene_settings(settings):
)
}
}
func
"""
%s_func
""" % (signature, signature)
harmony.send({"function": func, "args": [settings]})
@ -107,15 +111,15 @@ def check_inventory():
outdated_containers.append(container)
# Colour nodes.
func = """function func(args){
func = """function %s_func(args){
for( var i =0; i <= args[0].length - 1; ++i)
{
var red_color = new ColorRGBA(255, 0, 0, 255);
node.setColor(args[0][i], red_color);
}
}
func
"""
%s_func
""" % (signature, signature)
outdated_nodes = []
for container in outdated_containers:
if container["loader"] == "ImageSequenceLoader":
@ -144,7 +148,7 @@ def application_launch():
def export_template(backdrops, nodes, filepath):
func = """function func(args)
func = """function %s_func(args)
{
var temp_node = node.add("Top", "temp_note", "NOTE", 0, 0, 0);
@ -179,8 +183,8 @@ def export_template(backdrops, nodes, filepath):
Action.perform("onActionUpToParent()", "Node View");
node.deleteNode(template_group, true, true);
}
func
"""
%s_func
""" % (signature, signature)
harmony.send({
"function": func,
"args": [
@ -221,12 +225,15 @@ def install():
def on_pyblish_instance_toggled(instance, old_value, new_value):
"""Toggle node enabling on instance toggles."""
func = """function func(args)
func = """function %s_func(args)
{
node.setEnable(args[0], args[1])
}
func
"""
harmony.send(
{"function": func, "args": [instance[0], new_value]}
)
%s_func
""" % (signature, signature)
try:
harmony.send(
{"function": func, "args": [instance[0], new_value]}
)
except IndexError:
print(f"Instance '{instance}' is missing node")

View file

@ -0,0 +1,6 @@
# -*- coding: utf-8 -*-
from .base import PypeModule
__all__ = (
"PypeModule",
)

View file

@ -1,16 +1,27 @@
from Qt import QtWidgets
from avalon.tools import libraryloader
from pype.api import Logger
from pype.tools.launcher import LauncherWindow, actions
class AvalonApps:
def __init__(self, main_parent=None, parent=None):
self.log = Logger().get_logger(__name__)
self.main_parent = main_parent
self.tray_init(main_parent, parent)
def tray_init(self, main_parent, parent):
from avalon.tools.libraryloader import app
from avalon import style
from pype.tools.launcher import LauncherWindow, actions
self.parent = parent
self.main_parent = main_parent
self.app_launcher = LauncherWindow()
self.libraryloader = app.Window(
icon=self.parent.icon,
show_projects=True,
show_libraries=True
)
self.libraryloader.setStyleSheet(style.load_stylesheet())
# actions.register_default_actions()
actions.register_config_actions()
@ -23,6 +34,7 @@ class AvalonApps:
# Definition of Tray menu
def tray_menu(self, parent_menu=None):
from Qt import QtWidgets
# Actions
if parent_menu is None:
if self.parent is None:
@ -52,9 +64,11 @@ class AvalonApps:
self.app_launcher.activateWindow()
def show_library_loader(self):
libraryloader.show(
parent=self.main_parent,
icon=self.parent.icon,
show_projects=True,
show_libraries=True
)
self.libraryloader.show()
# Raise and activate the window
# for MacOS
self.libraryloader.raise_()
# for Windows
self.libraryloader.activateWindow()
self.libraryloader.refresh()

pype/modules/base.py (new file, 38 lines)
View file

@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
"""Base class for Pype Modules."""
from uuid import uuid4
from abc import ABC, abstractmethod
from pype.api import Logger
class PypeModule(ABC):
"""Base class of pype module.
Attributes:
id (UUID): Module id.
enabled (bool): Is module enabled.
name (str): Module name.
"""
enabled = False
name = None
_id = None
def __init__(self, settings):
if self.name is None:
self.name = self.__class__.__name__
self.log = Logger().get_logger(self.name)
self.settings = settings.get(self.name)
self.enabled = settings.get("enabled", False)
self._id = uuid4()
@property
def id(self):
return self._id
@abstractmethod
def startup_environments(self):
"""Get startup environments for module."""
return {}
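
A minimal sketch of a concrete module built on this base class; the module name, settings shape and environment values are hypothetical.

from pype.modules import PypeModule


class ExampleModule(PypeModule):
    """Hypothetical module that only injects one environment variable."""

    name = "example"

    def startup_environments(self):
        # Returned mapping is merged into the environment used
        # when applications are launched.
        return {"EXAMPLE_ROOT": "/opt/example"}


# The constructor receives the settings dictionary, e.g.:
# module = ExampleModule(settings)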

View file

@ -1,9 +1,8 @@
import os
import threading
import time
from pype.api import Logger
from avalon import style
from Qt import QtWidgets
from .widgets import ClockifySettings, MessageWidget
from .clockify_api import ClockifyAPI
from .constants import CLOCKIFY_FTRACK_USER_PATH
@ -17,11 +16,21 @@ class ClockifyModule:
os.environ["CLOCKIFY_WORKSPACE"] = self.workspace_name
self.timer_manager = None
self.MessageWidgetClass = None
self.clockapi = ClockifyAPI(master_parent=self)
self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")
self.tray_init(main_parent, parent)
def tray_init(self, main_parent, parent):
from .widgets import ClockifySettings, MessageWidget
self.MessageWidgetClass = MessageWidget
self.main_parent = main_parent
self.parent = parent
self.clockapi = ClockifyAPI(master_parent=self)
self.message_widget = None
self.widget_settings = ClockifySettings(main_parent, self)
self.widget_settings_required = None
@ -57,11 +66,10 @@ class ClockifyModule:
)
if 'AvalonApps' in modules:
from launcher import lib
actions_path = os.path.sep.join([
actions_path = os.path.join(
os.path.dirname(__file__),
'launcher_actions'
])
)
current = os.environ.get('AVALON_ACTIONS', '')
if current:
current += os.pathsep
@ -78,12 +86,12 @@ class ClockifyModule:
self.stop_timer()
def timer_started(self, data):
if hasattr(self, 'timer_manager'):
if self.timer_manager:
self.timer_manager.start_timers(data)
def timer_stopped(self):
self.bool_timer_run = False
if hasattr(self, 'timer_manager'):
if self.timer_manager:
self.timer_manager.stop_timers()
def start_timer_check(self):
@ -102,7 +110,7 @@ class ClockifyModule:
self.thread_timer_check = None
def check_running(self):
import time
while self.bool_thread_check_running is True:
bool_timer_run = False
if self.clockapi.get_in_progress() is not None:
@ -156,15 +164,14 @@ class ClockifyModule:
self.timer_stopped()
def signed_in(self):
if hasattr(self, 'timer_manager'):
if not self.timer_manager:
return
if not self.timer_manager:
return
if not self.timer_manager.last_task:
return
if not self.timer_manager.last_task:
return
if self.timer_manager.is_running:
self.start_timer_manager(self.timer_manager.last_task)
if self.timer_manager.is_running:
self.start_timer_manager(self.timer_manager.last_task)
def start_timer(self, input_data):
# If api key is not entered then skip
@ -197,11 +204,14 @@ class ClockifyModule:
"<br><br>Please inform your Project Manager."
).format(project_name, str(self.clockapi.workspace_name))
self.message_widget = MessageWidget(
self.main_parent, msg, "Clockify - Info Message"
)
self.message_widget.closed.connect(self.on_message_widget_close)
self.message_widget.show()
if self.MessageWidgetClass:
self.message_widget = self.MessageWidgetClass(
self.main_parent, msg, "Clockify - Info Message"
)
self.message_widget.closed.connect(
self.on_message_widget_close
)
self.message_widget.show()
return
@ -227,31 +237,29 @@ class ClockifyModule:
# Definition of Tray menu
def tray_menu(self, parent_menu):
# Menu for Tray App
self.menu = QtWidgets.QMenu('Clockify', parent_menu)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())
from Qt import QtWidgets
menu = QtWidgets.QMenu("Clockify", parent_menu)
menu.setProperty("submenu", "on")
# Actions
self.aShowSettings = QtWidgets.QAction(
"Settings", self.menu
)
self.aStopTimer = QtWidgets.QAction(
"Stop timer", self.menu
)
action_show_settings = QtWidgets.QAction("Settings", menu)
action_stop_timer = QtWidgets.QAction("Stop timer", menu)
self.menu.addAction(self.aShowSettings)
self.menu.addAction(self.aStopTimer)
menu.addAction(action_show_settings)
menu.addAction(action_stop_timer)
self.aShowSettings.triggered.connect(self.show_settings)
self.aStopTimer.triggered.connect(self.stop_timer)
action_show_settings.triggered.connect(self.show_settings)
action_stop_timer.triggered.connect(self.stop_timer)
self.action_stop_timer = action_stop_timer
self.set_menu_visibility()
parent_menu.addMenu(self.menu)
parent_menu.addMenu(menu)
def show_settings(self):
self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
self.widget_settings.show()
def set_menu_visibility(self):
self.aStopTimer.setVisible(self.bool_timer_run)
self.action_stop_timer.setVisible(self.bool_timer_run)

View file

@ -1,2 +1,12 @@
from .lib import *
from . import ftrack_server
from .ftrack_server import FtrackServer, check_ftrack_url
from .lib import BaseHandler, BaseEvent, BaseAction
__all__ = (
"ftrack_server",
"FtrackServer",
"check_ftrack_url",
"BaseHandler",
"BaseEvent",
"BaseAction"
)

View file

@ -1,2 +1,8 @@
from .ftrack_server import FtrackServer
from .lib import check_ftrack_url
__all__ = (
"FtrackServer",
"check_ftrack_url"
)

View file

@ -16,9 +16,9 @@ import pymongo
from pype.api import decompose_url
class NotActiveTable(Exception):
class NotActiveCollection(Exception):
def __init__(self, *args, **kwargs):
msg = "Active table is not set. (This is bug)"
msg = "Active collection is not set. (This is bug)"
if not (args or kwargs):
args = [msg]
super().__init__(*args, **kwargs)
@ -40,12 +40,12 @@ def auto_reconnect(func):
return decorated
def check_active_table(func):
def check_active_collection(func):
"""Check if CustomDbConnector has active collection."""
@functools.wraps(func)
def decorated(obj, *args, **kwargs):
if not obj.active_table:
raise NotActiveTable()
if not obj.active_collection:
raise NotActiveCollection()
return func(obj, *args, **kwargs)
return decorated
@ -55,7 +55,7 @@ class CustomDbConnector:
timeout = int(os.environ["AVALON_TIMEOUT"])
def __init__(
self, uri, database_name, port=None, table_name=None
self, uri, database_name, port=None, collection_name=None
):
self._mongo_client = None
self._sentry_client = None
@ -76,10 +76,10 @@ class CustomDbConnector:
self._port = port
self._database_name = database_name
self.active_table = table_name
self.active_collection = collection_name
def __getitem__(self, key):
# gives direct access to collection without setting `active_table`
# gives direct access to collection without setting `active_collection`
return self._database[key]
def __getattribute__(self, attr):
@ -88,9 +88,11 @@ class CustomDbConnector:
try:
return super(CustomDbConnector, self).__getattribute__(attr)
except AttributeError:
if self.active_table is None:
raise NotActiveTable()
return self._database[self.active_table].__getattribute__(attr)
if self.active_collection is None:
raise NotActiveCollection()
return self._database[self.active_collection].__getattribute__(
attr
)
def install(self):
"""Establish a persistent connection to the database"""
@ -146,46 +148,30 @@ class CustomDbConnector:
self._is_installed = False
atexit.unregister(self.uninstall)
def create_table(self, name, **options):
if self.exist_table(name):
def collection_exists(self, collection_name):
return collection_name in self.collections()
def create_collection(self, name, **options):
if self.collection_exists(name):
return
return self._database.create_collection(name, **options)
def exist_table(self, table_name):
return table_name in self.tables()
def create_table(self, name, **options):
if self.exist_table(name):
return
return self._database.create_collection(name, **options)
def exist_table(self, table_name):
return table_name in self.tables()
def tables(self):
"""List available tables
Returns:
list of table names
"""
collection_names = self.collections()
for table_name in collection_names:
if table_name in ("system.indexes",):
continue
yield table_name
@auto_reconnect
def collections(self):
return self._database.collection_names()
for col_name in self._database.collection_names():
if col_name not in ("system.indexes",):
yield col_name
@check_active_table
@check_active_collection
@auto_reconnect
def insert_one(self, item, **options):
assert isinstance(item, dict), "item must be of type <dict>"
return self._database[self.active_table].insert_one(item, **options)
return self._database[self.active_collection].insert_one(
item, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def insert_many(self, items, ordered=True, **options):
# check if all items are valid
@ -194,72 +180,74 @@ class CustomDbConnector:
assert isinstance(item, dict), "`item` must be of type <dict>"
options["ordered"] = ordered
return self._database[self.active_table].insert_many(items, **options)
return self._database[self.active_collection].insert_many(
items, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def find(self, filter, projection=None, sort=None, **options):
options["sort"] = sort
return self._database[self.active_table].find(
return self._database[self.active_collection].find(
filter, projection, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def find_one(self, filter, projection=None, sort=None, **options):
assert isinstance(filter, dict), "filter must be <dict>"
options["sort"] = sort
return self._database[self.active_table].find_one(
return self._database[self.active_collection].find_one(
filter,
projection,
**options
)
@check_active_table
@check_active_collection
@auto_reconnect
def replace_one(self, filter, replacement, **options):
return self._database[self.active_table].replace_one(
return self._database[self.active_collection].replace_one(
filter, replacement, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def update_one(self, filter, update, **options):
return self._database[self.active_table].update_one(
return self._database[self.active_collection].update_one(
filter, update, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def update_many(self, filter, update, **options):
return self._database[self.active_table].update_many(
return self._database[self.active_collection].update_many(
filter, update, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def distinct(self, **options):
return self._database[self.active_table].distinct(**options)
return self._database[self.active_collection].distinct(**options)
@check_active_table
@check_active_collection
@auto_reconnect
def drop_collection(self, name_or_collection, **options):
return self._database[self.active_table].drop(
return self._database[self.active_collection].drop(
name_or_collection, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def delete_one(self, filter, collation=None, **options):
options["collation"] = collation
return self._database[self.active_table].delete_one(
return self._database[self.active_collection].delete_one(
filter, **options
)
@check_active_table
@check_active_collection
@auto_reconnect
def delete_many(self, filter, collation=None, **options):
options["collation"] = collation
return self._database[self.active_table].delete_many(
return self._database[self.active_collection].delete_many(
filter, **options
)
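
A usage sketch of the renamed collection-based API, assuming a reachable MongoDB and the environment variables the class expects (e.g. AVALON_TIMEOUT); URI, database and collection names are placeholders.

dbcon = CustomDbConnector(
    "mongodb://localhost:27017",
    "pype_ftrack_events",
    collection_name="events"
)
dbcon.install()  # establish the persistent connection

if not dbcon.collection_exists("events"):
    dbcon.create_collection("events")

dbcon.insert_one({"topic": "ftrack.update", "pype_data": {"is_processed": False}})
unprocessed = dbcon.find_one(
    {"pype_data.is_processed": False},
    sort=[("_id", 1)]
)
dbcon.uninstall()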

View file

@ -26,7 +26,7 @@ from pype.api import (
compose_url
)
from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
from .custom_db_connector import CustomDbConnector
TOPIC_STATUS_SERVER = "pype.event.server.status"
@ -153,9 +153,9 @@ class StorerEventHub(SocketBaseEventHub):
class ProcessEventHub(SocketBaseEventHub):
hearbeat_msg = b"processor"
uri, port, database, table_name = get_ftrack_event_mongo_info()
uri, port, database, collection_name = get_ftrack_event_mongo_info()
is_table_created = False
is_collection_created = False
pypelog = Logger().get_logger("Session Processor")
def __init__(self, *args, **kwargs):
@ -163,7 +163,7 @@ class ProcessEventHub(SocketBaseEventHub):
self.uri,
self.database,
self.port,
self.table_name
self.collection_name
)
super(ProcessEventHub, self).__init__(*args, **kwargs)
@ -184,7 +184,7 @@ class ProcessEventHub(SocketBaseEventHub):
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(self.database, self.table_name))
).format(self.database, self.collection_name))
self.sock.sendall(b"MongoError")
sys.exit(0)

View file

@ -12,7 +12,9 @@ from pype.modules.ftrack.ftrack_server.lib import (
get_ftrack_event_mongo_info,
TOPIC_STATUS_SERVER, TOPIC_STATUS_SERVER_RESULT
)
from pype.modules.ftrack.lib.custom_db_connector import CustomDbConnector
from pype.modules.ftrack.ftrack_server.custom_db_connector import (
CustomDbConnector
)
from pype.api import Logger
log = Logger().get_logger("Event storer")
@ -23,8 +25,8 @@ class SessionFactory:
session = None
uri, port, database, table_name = get_ftrack_event_mongo_info()
dbcon = CustomDbConnector(uri, database, port, table_name)
uri, port, database, collection_name = get_ftrack_event_mongo_info()
dbcon = CustomDbConnector(uri, database, port, collection_name)
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []
@ -200,7 +202,7 @@ def main(args):
"Error with Mongo access, probably permissions."
"Check if exist database with name \"{}\""
" and collection \"{}\" inside."
).format(database, table_name))
).format(database, collection_name))
sock.sendall(b"MongoError")
finally:

View file

@ -1022,7 +1022,7 @@ class SyncEntitiesFactory:
continue
ent_path_items = [ent["name"] for ent in entity["link"]]
parents = ent_path_items[1:len(ent_path_items)-1:]
parents = ent_path_items[1:len(ent_path_items) - 1:]
hierarchy = ""
if len(parents) > 0:
hierarchy = os.path.sep.join(parents)
@ -1141,7 +1141,7 @@ class SyncEntitiesFactory:
if not is_right and not else_match_better:
entity = entity_dict["entity"]
ent_path_items = [ent["name"] for ent in entity["link"]]
parents = ent_path_items[1:len(ent_path_items)-1:]
parents = ent_path_items[1:len(ent_path_items) - 1:]
av_parents = av_ent_by_mongo_id["data"]["parents"]
if av_parents == parents:
is_right = True

View file

@ -2,7 +2,7 @@ import functools
import time
from pype.api import Logger
import ftrack_api
from pype.modules.ftrack.ftrack_server.lib import SocketSession
from pype.modules.ftrack import ftrack_server
class MissingPermision(Exception):
@ -41,7 +41,7 @@ class BaseHandler(object):
self.log = Logger().get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, SocketSession)
isinstance(session, ftrack_server.lib.SocketSession)
):
raise Exception((
"Session object entered with args is instance of \"{}\""
@ -49,7 +49,7 @@ class BaseHandler(object):
).format(
str(type(session)),
str(ftrack_api.session.Session),
str(SocketSession)
str(ftrack_server.lib.SocketSession)
))
self._session = session

View file

@ -1,7 +1,7 @@
import os
import requests
from avalon import style
from pype.modules.ftrack import credentials
from pype.modules.ftrack.lib import credentials
from . import login_tools
from pype.api import resources
from Qt import QtCore, QtGui, QtWidgets
@ -238,6 +238,8 @@ class CredentialsDialog(QtWidgets.QDialog):
# If there is an existing server thread running we need to stop it.
if self._login_server_thread:
if self._login_server_thread.isAlive():
self._login_server_thread.stop()
self._login_server_thread.join()
self._login_server_thread = None

View file

@ -61,12 +61,17 @@ class LoginServerThread(threading.Thread):
def __init__(self, url, callback):
self.url = url
self.callback = callback
self._server = None
super(LoginServerThread, self).__init__()
def _handle_login(self, api_user, api_key):
'''Login to server with *api_user* and *api_key*.'''
self.callback(api_user, api_key)
def stop(self):
if self._server:
self._server.server_close()
def run(self):
'''Listen for events.'''
self._server = HTTPServer(

View file

@ -1,6 +1,4 @@
from Qt import QtWidgets
from pype.api import Logger
from ..gui.app import LogsWindow
class LoggingModule:
@ -8,7 +6,13 @@ class LoggingModule:
self.parent = parent
self.log = Logger().get_logger(self.__class__.__name__, "logging")
self.window = None
self.tray_init(main_parent, parent)
def tray_init(self, main_parent, parent):
try:
from .gui.app import LogsWindow
self.window = LogsWindow()
self.tray_menu = self._tray_menu
except Exception:
@ -18,12 +22,12 @@ class LoggingModule:
# Definition of Tray menu
def _tray_menu(self, parent_menu):
from Qt import QtWidgets
# Menu for Tray App
menu = QtWidgets.QMenu('Logging', parent_menu)
# menu.setProperty('submenu', 'on')
show_action = QtWidgets.QAction("Show Logs", menu)
show_action.triggered.connect(self.on_show_logs)
show_action.triggered.connect(self._show_logs_gui)
menu.addAction(show_action)
parent_menu.addMenu(menu)
@ -34,5 +38,6 @@ class LoggingModule:
def process_modules(self, modules):
return
def on_show_logs(self):
self.window.show()
def _show_logs_gui(self):
if self.window:
self.window.show()

View file

@ -1,10 +1,7 @@
import appdirs
from avalon import style
from Qt import QtWidgets
import os
import json
from .widget_login import MusterLogin
from avalon.vendor import requests
import appdirs
import requests
class MusterModule:
@ -21,6 +18,11 @@ class MusterModule:
self.cred_path = os.path.join(
self.cred_folder_path, self.cred_filename
)
self.tray_init(main_parent, parent)
def tray_init(self, main_parent, parent):
from .widget_login import MusterLogin
self.main_parent = main_parent
self.parent = parent
self.widget_login = MusterLogin(main_parent, self)
@ -38,10 +40,6 @@ class MusterModule:
pass
def process_modules(self, modules):
def api_callback():
self.aShowLogin.trigger()
if "RestApiServer" in modules:
def api_show_login():
self.aShowLogin.trigger()
@ -51,13 +49,12 @@ class MusterModule:
# Definition of Tray menu
def tray_menu(self, parent):
"""
Add **change credentials** option to tray menu.
"""
"""Add **change credentials** option to tray menu."""
from Qt import QtWidgets
# Menu for Tray App
self.menu = QtWidgets.QMenu('Muster', parent)
self.menu.setProperty('submenu', 'on')
self.menu.setStyleSheet(style.load_stylesheet())
# Actions
self.aShowLogin = QtWidgets.QAction(
@ -91,9 +88,9 @@ class MusterModule:
if not MUSTER_REST_URL:
raise AttributeError("Muster REST API url not set")
params = {
'username': username,
'password': password
}
'username': username,
'password': password
}
api_entry = '/api/login'
response = self._requests_post(
MUSTER_REST_URL + api_entry, params=params)

View file

@ -1,6 +1,6 @@
import os
import socket
from Qt import QtCore
import threading
from socketserver import ThreadingMixIn
from http.server import HTTPServer
@ -155,14 +155,15 @@ class RestApiServer:
def is_running(self):
return self.rest_api_thread.is_running
def tray_exit(self):
self.stop()
def stop(self):
self.rest_api_thread.is_running = False
def thread_stopped(self):
self._is_running = False
self.rest_api_thread.stop()
self.rest_api_thread.join()
class RestApiThread(QtCore.QThread):
class RestApiThread(threading.Thread):
""" Listener for REST requests.
It is possible to register callbacks for url paths.
@ -174,6 +175,12 @@ class RestApiThread(QtCore.QThread):
self.is_running = False
self.module = module
self.port = port
self.httpd = None
def stop(self):
self.is_running = False
if self.httpd:
self.httpd.server_close()
def run(self):
self.is_running = True
@ -185,12 +192,14 @@ class RestApiThread(QtCore.QThread):
)
with ThreadingSimpleServer(("", self.port), Handler) as httpd:
self.httpd = httpd
while self.is_running:
httpd.handle_request()
except Exception:
log.warning(
"Rest Api Server service has failed", exc_info=True
)
self.httpd = None
self.is_running = False
self.module.thread_stopped()
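
The stop pattern introduced above, clearing a flag and closing the listening socket so the blocking handle_request() loop can end before the thread is joined, as a self-contained sketch; the handler and port are hypothetical.

import threading
from http.server import BaseHTTPRequestHandler, HTTPServer


class _PingHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b"pong")


class StoppableHttpThread(threading.Thread):
    def __init__(self, port=8021):
        super().__init__()
        self.port = port
        self.is_running = False
        self.httpd = None

    def stop(self):
        self.is_running = False
        if self.httpd:
            # Closing the listening socket makes the blocked
            # handle_request() fail, which ends the serving loop.
            self.httpd.server_close()

    def run(self):
        self.is_running = True
        try:
            with HTTPServer(("", self.port), _PingHandler) as httpd:
                self.httpd = httpd
                while self.is_running:
                    httpd.handle_request()
        except OSError:
            pass
        self.httpd = None
        self.is_running = False


# thread = StoppableHttpThread(); thread.start(); ...; thread.stop(); thread.join()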

View file

@ -2,7 +2,6 @@ import os
import sys
import subprocess
import pype
from pype import tools
class StandAlonePublishModule:
@ -30,6 +29,7 @@ class StandAlonePublishModule:
))
def show(self):
from pype import tools
standalone_publisher_tool_path = os.path.join(
os.path.dirname(tools.__file__),
"standalonepublish"

View file

@ -1,5 +1,4 @@
from .timers_manager import TimersManager
from .widget_user_idle import WidgetUserIdle
CLASS_DEFINIION = TimersManager

View file

@ -1,21 +1,7 @@
from .widget_user_idle import WidgetUserIdle, SignalHandler
from pype.api import Logger, config
from pype.api import Logger
class Singleton(type):
""" Signleton implementation
"""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(
Singleton, cls
).__call__(*args, **kwargs)
return cls._instances[cls]
class TimersManager(metaclass=Singleton):
class TimersManager:
""" Handles about Timers.
Should be able to start/stop all timers at once.
@ -41,7 +27,13 @@ class TimersManager(metaclass=Singleton):
self.idle_man = None
self.signal_handler = None
self.tray_init(tray_widget, main_widget)
def tray_init(self, tray_widget, main_widget):
from .widget_user_idle import WidgetUserIdle, SignalHandler
self.widget_user_idle = WidgetUserIdle(self, tray_widget)
self.signal_handler = SignalHandler(self)
def set_signal_times(self):
try:
@ -119,7 +111,6 @@ class TimersManager(metaclass=Singleton):
"""
if 'IdleManager' in modules:
self.signal_handler = SignalHandler(self)
if self.set_signal_times() is True:
self.register_to_idle_manager(modules['IdleManager'])

View file

@ -3,8 +3,6 @@ import json
import getpass
import appdirs
from Qt import QtWidgets
from .widget_user import UserWidget
from pype.api import Logger
@ -24,6 +22,12 @@ class UserModule:
self.cred_path = os.path.normpath(os.path.join(
self.cred_folder_path, self.cred_filename
))
self.widget_login = None
self.tray_init(main_parent, parent)
def tray_init(self, main_parent=None, parent=None):
from .widget_user import UserWidget
self.widget_login = UserWidget(self)
self.load_credentials()
@ -66,6 +70,7 @@ class UserModule:
# Definition of Tray menu
def tray_menu(self, parent_menu):
from Qt import QtWidgets
"""Add menu or action to Tray(or parent)'s menu"""
action = QtWidgets.QAction("Username", parent_menu)
action.triggered.connect(self.show_widget)
@ -121,7 +126,8 @@ class UserModule:
self.cred = {"username": username}
os.environ[self.env_name] = username
self.widget_login.set_user(username)
if self.widget_login:
self.widget_login.set_user(username)
try:
file = open(self.cred_path, "w")
file.write(json.dumps(self.cred))

View file

@ -31,12 +31,13 @@ class WebSocketServer():
self.client = None
self.handlers = {}
port = None
websocket_url = os.getenv("WEBSOCKET_URL")
if websocket_url:
parsed = urllib.parse.urlparse(websocket_url)
port = parsed.port
if not port:
port = 8099 # fallback
port = 8098 # fallback
self.app = web.Application()
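
The port resolution above in isolation; the URL value in the comment is hypothetical.

import os
import urllib.parse

websocket_url = os.getenv("WEBSOCKET_URL")  # e.g. "ws://localhost:8098/ws/"
port = None
if websocket_url:
    port = urllib.parse.urlparse(websocket_url).port
if not port:
    port = 8098  # fallback when the URL is unset or has no explicit port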

pype/modules_manager.py (new file, 102 lines)
View file

@ -0,0 +1,102 @@
import os
import inspect
import pype.modules
from pype.modules import PypeModule
from pype.settings import system_settings
from pype.api import Logger
class PypeModuleManager:
skip_module_names = ("__pycache__", )
def __init__(self):
self.log = Logger().get_logger(
"{}.{}".format(__name__, self.__class__.__name__)
)
self.pype_modules = self.find_pype_modules()
def modules_environments(self):
environments = {}
for pype_module in self.pype_modules.values():
environments.update(pype_module.startup_environments())
return environments
def find_pype_modules(self):
settings = system_settings()
modules = []
dirpath = os.path.dirname(pype.modules.__file__)
for module_name in os.listdir(dirpath):
# Check if path leads to a folder
full_path = os.path.join(dirpath, module_name)
if not os.path.isdir(full_path):
continue
# Skip known invalid names
if module_name in self.skip_module_names:
continue
import_name = "pype.modules.{}".format(module_name)
try:
modules.append(
__import__(import_name, fromlist=[""])
)
except Exception:
self.log.warning(
"Couldn't import {}".format(import_name), exc_info=True
)
pype_module_classes = []
for module in modules:
try:
pype_module_classes.extend(
self._classes_from_module(PypeModule, module)
)
except Exception:
self.log.warning(
"Couldn't import {}".format(import_name), exc_info=True
)
pype_modules = {}
for pype_module_class in pype_module_classes:
try:
pype_module = pype_module_class(settings)
if pype_module.enabled:
pype_modules[pype_module.id] = pype_module
except Exception:
self.log.warning(
"Couldn't create instance of {}".format(
pype_module_class.__name__
),
exc_info=True
)
return pype_modules
def _classes_from_module(self, superclass, module):
classes = list()
def recursive_bases(klass):
output = []
output.extend(klass.__bases__)
for base in klass.__bases__:
output.extend(recursive_bases(base))
return output
for name in dir(module):
# It could be anything at this point
obj = getattr(module, name)
if not inspect.isclass(obj) or not len(obj.__bases__) > 0:
continue
# Use string comparison rather than `issubclass`
# in order to support reloading of this module.
bases = recursive_bases(obj)
if not any(base.__name__ == superclass.__name__ for base in bases):
continue
classes.append(obj)
return classes
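
A sketch of how the manager might be driven, assuming pype is importable and system settings are configured; applying the environments to os.environ is an illustrative choice, not something this file does.

import os

from pype.modules_manager import PypeModuleManager

manager = PypeModuleManager()

# Merge startup environments of all enabled modules into this process.
for key, value in manager.modules_environments().items():
    os.environ[key] = str(value)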

View file

@ -97,6 +97,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
def process(self, instance):
@ -178,6 +179,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Adding metadata
@ -228,6 +230,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Adding metadata
@ -242,6 +245,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
session.commit()
except Exception:
session.rollback()
session._configure_locations()
self.log.warning((
"Comment was not possible to set for AssetVersion"
"\"{0}\". Can't set it's value to: \"{1}\""
@ -258,6 +262,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
continue
except Exception:
session.rollback()
session._configure_locations()
self.log.warning((
"Custom Attrubute \"{0}\""
@ -272,6 +277,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Component
@ -316,6 +322,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
# Reset members in memory
@ -432,6 +439,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
if assetversion_entity not in used_asset_versions:

View file

@ -145,4 +145,5 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
except Exception:
tp, value, tb = sys.exc_info()
session.rollback()
session._configure_locations()
six.reraise(tp, value, tb)
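
The change repeated across the two plugins above adds session._configure_locations() after every rollback, presumably because rollback() discards cached session state including the configured locations. A compact sketch of the pattern; note that _configure_locations() is a private ftrack_api method, so this only mirrors the existing usage rather than a public API.

import sys

import six


def commit_or_reraise(session):
    """Commit the ftrack session; on failure roll back, restore locations
    and re-raise the original exception with its traceback."""
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        session._configure_locations()
        six.reraise(tp, value, tb)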

View file

@ -2,7 +2,6 @@ import sys
import six
import pyblish.api
from avalon import io
from pprint import pformat
try:
from pype.modules.ftrack.lib.avalon_sync import CUST_ATTR_AUTO_SYNC
@ -46,9 +45,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
hierarchy_context = self.context.data["hierarchyContext"]
self.log.debug(
f"__ hierarchy_context: `{pformat(hierarchy_context)}`")
self.session = self.context.data["ftrackSession"]
project_name = self.context.data["projectEntity"]["name"]
query = 'Project where full_name is "{}"'.format(project_name)
@ -134,6 +130,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# TASKS
@ -162,6 +159,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Incoming links.
@ -171,6 +169,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Create notes.
@ -191,6 +190,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Import children.
@ -207,6 +207,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
# Create new links.
@ -248,6 +249,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
return task
@ -262,6 +264,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
self.session._configure_locations()
six.reraise(tp, value, tb)
return entity
@ -276,7 +279,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
self.session._configure_locations()
six.reraise(tp, value, tb)
def auto_sync_on(self, project):
@ -289,4 +293,5 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
raise
self.session._configure_locations()
six.reraise(tp, value, tb)

View file

@ -20,8 +20,8 @@ class CopyFile(api.Loader):
def copy_file_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets
app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
clipboard = QtWidgets.QApplication.clipboard()
assert clipboard, "Must have running QApplication instance"
# Build mime data for clipboard
data = QtCore.QMimeData()
@ -29,5 +29,4 @@ class CopyFile(api.Loader):
data.setUrls([url])
# Set to Clipboard
clipboard = app.clipboard()
clipboard.setMimeData(data)

View file

@ -19,11 +19,10 @@ class CopyFilePath(api.Loader):
@staticmethod
def copy_path_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets
from avalon.vendor.Qt import QtWidgets
app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
clipboard = QtWidgets.QApplication.clipboard()
assert clipboard, "Must have running QApplication instance"
# Set to Clipboard
clipboard = app.clipboard()
clipboard.setText(os.path.normpath(path))

View file

@ -23,123 +23,256 @@ Provides:
import copy
import json
import collections
from avalon import io
import pyblish.api
class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
"""Collect Instance specific Anatomy data."""
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
"""Collect Instance specific Anatomy data.
The plugin runs for all instances in the context, including inactive ones.
"""
order = pyblish.api.CollectorOrder + 0.49
label = "Collect Anatomy Instance data"
def process(self, instance):
# get all the stuff from the database
anatomy_data = copy.deepcopy(instance.context.data["anatomyData"])
project_entity = instance.context.data["projectEntity"]
context_asset_entity = instance.context.data["assetEntity"]
instance_asset_entity = instance.data.get("assetEntity")
def process(self, context):
self.log.info("Collecting anatomy data for all instances.")
asset_name = instance.data["asset"]
self.fill_missing_asset_docs(context)
self.fill_latest_versions(context)
self.fill_anatomy_data(context)
# There is possibility that assetEntity on instance is already set
# which can happen in standalone publisher
if (
instance_asset_entity
and instance_asset_entity["name"] == asset_name
):
asset_entity = instance_asset_entity
self.log.info("Anatomy Data collection finished.")
# Check if asset name is the same as what is in context
# - they may be different, e.g. in NukeStudio
elif context_asset_entity["name"] == asset_name:
asset_entity = context_asset_entity
def fill_missing_asset_docs(self, context):
self.log.debug("Qeurying asset documents for instances.")
else:
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
context_asset_doc = context.data["assetEntity"]
subset_name = instance.data["subset"]
version_number = instance.data.get("version")
latest_version = None
instances_with_missing_asset_doc = collections.defaultdict(list)
for instance in context:
instance_asset_doc = instance.data.get("assetEntity")
_asset_name = instance.data["asset"]
if asset_entity:
subset_entity = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_entity["_id"]
})
# There is possibility that assetEntity on instance is already set
# which can happen in standalone publisher
if (
instance_asset_doc
and instance_asset_doc["name"] == _asset_name
):
continue
# Check if asset name is the same as what is in context
# - they may be different, e.g. in NukeStudio
if context_asset_doc["name"] == _asset_name:
instance.data["assetEntity"] = context_asset_doc
if subset_entity is None:
self.log.debug("Subset entity does not exist yet.")
else:
version_entity = io.find_one(
{
"type": "version",
"parent": subset_entity["_id"]
},
sort=[("name", -1)]
)
if version_entity:
latest_version = version_entity["name"]
instances_with_missing_asset_doc[_asset_name].append(instance)
# If version is not specified for instance or context
if version_number is None:
# TODO we should be able to change default version by studio
# preferences (like start with version number `0`)
version_number = 1
# use latest version (+1) if already any exist
if latest_version is not None:
version_number += int(latest_version)
if not instances_with_missing_asset_doc:
self.log.debug("All instances already had right asset document.")
return
anatomy_updates = {
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
asset_names = list(instances_with_missing_asset_doc.keys())
self.log.debug("Querying asset documents with names: {}".format(
", ".join(["\"{}\"".format(name) for name in asset_names])
))
asset_docs = io.find({
"type": "asset",
"name": {"$in": asset_names}
})
asset_docs_by_name = {
asset_doc["name"]: asset_doc
for asset_doc in asset_docs
}
if (
asset_entity
and asset_entity["_id"] != context_asset_entity["_id"]
):
parents = asset_entity["data"].get("parents") or list()
anatomy_updates["hierarchy"] = "/".join(parents)
task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name
not_found_asset_names = []
for asset_name, instances in instances_with_missing_asset_doc.items():
asset_doc = asset_docs_by_name.get(asset_name)
if not asset_doc:
not_found_asset_names.append(asset_name)
continue
# Version should not be collected since may be instance
anatomy_data.update(anatomy_updates)
for _instance in instances:
_instance.data["assetEntity"] = asset_doc
resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_data["resolution_width"] = resolution_width
if not_found_asset_names:
joined_asset_names = ", ".join(
["\"{}\"".format(name) for name in not_found_asset_names]
)
self.log.warning((
"Not found asset documents with names \"{}\"."
).format(joined_asset_names))
resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
def fill_latest_versions(self, context):
"""Try to find latest version for each instance's subset.
pixel_aspect = instance.data.get("pixelAspect")
if pixel_aspect:
anatomy_data["pixel_aspect"] = float("{:0.2f}".format(
float(pixel_aspect)))
Key "latestVersion" is always set to latest version or `None`.
fps = instance.data.get("fps")
if fps:
anatomy_data["fps"] = float("{:0.2f}".format(
float(fps)))
Args:
context (pyblish.Context)
instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity
instance.data["anatomyData"] = anatomy_data
instance.data["latestVersion"] = latest_version
# TODO should be version number set here?
instance.data["version"] = version_number
Returns:
None
self.log.info("Instance anatomy Data collected")
self.log.debug(json.dumps(anatomy_data, indent=4))
"""
self.log.debug("Qeurying latest versions for instances.")
hierarchy = {}
subset_names = set()
asset_ids = set()
for instance in context:
# Make sure `"latestVersion"` key is set
latest_version = instance.data.get("latestVersion")
instance.data["latestVersion"] = latest_version
# Skip instances without "assetEntity"
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
continue
# Store asset ids and subset names for queries
asset_id = asset_doc["_id"]
subset_name = instance.data["subset"]
asset_ids.add(asset_id)
subset_names.add(subset_name)
# Prepare instance hierarchy for faster filling of latest versions
if asset_id not in hierarchy:
hierarchy[asset_id] = {}
if subset_name not in hierarchy[asset_id]:
hierarchy[asset_id][subset_name] = []
hierarchy[asset_id][subset_name].append(instance)
subset_docs = list(io.find({
"type": "subset",
"parent": {"$in": list(asset_ids)},
"name": {"$in": list(subset_names)}
}))
subset_ids = [
subset_doc["_id"]
for subset_doc in subset_docs
]
last_version_by_subset_id = self._query_last_versions(subset_ids)
for subset_doc in subset_docs:
subset_id = subset_doc["_id"]
last_version = last_version_by_subset_id.get(subset_id)
if last_version is None:
continue
asset_id = subset_doc["parent"]
subset_name = subset_doc["name"]
_instances = hierarchy[asset_id][subset_name]
for _instance in _instances:
_instance.data["latestVersion"] = last_version
def _query_last_versions(self, subset_ids):
"""Retrieve all latest versions for entered subset_ids.
Args:
subset_ids (list): List of subset ids with type `ObjectId`.
Returns:
dict: Key is subset id and value is last version name.
"""
_pipeline = [
# Find all versions of those subsets
{"$match": {
"type": "version",
"parent": {"$in": subset_ids}
}},
# Sorting versions all together
{"$sort": {"name": 1}},
# Group them by "parent", but only take the last
{"$group": {
"_id": "$parent",
"_version_id": {"$last": "$_id"},
"name": {"$last": "$name"}
}}
]
last_version_by_subset_id = {}
for doc in io.aggregate(_pipeline):
subset_id = doc["_id"]
last_version_by_subset_id[subset_id] = doc["name"]
return last_version_by_subset_id
def fill_anatomy_data(self, context):
self.log.debug("Storing anatomy data to instance data.")
project_doc = context.data["projectEntity"]
context_asset_doc = context.data["assetEntity"]
for instance in context:
version_number = instance.data.get("version")
# If version is not specified for instance or context
if version_number is None:
# TODO we should be able to change default version by studio
# preferences (like start with version number `0`)
version_number = 1
# use latest version (+1) if already any exist
latest_version = instance.data["latestVersion"]
if latest_version is not None:
version_number += int(latest_version)
anatomy_updates = {
"asset": instance.data["asset"],
"family": instance.data["family"],
"subset": instance.data["subset"],
"version": version_number
}
# Hierarchy
asset_doc = instance.data.get("assetEntity")
if asset_doc and asset_doc["_id"] != context_asset_doc["_id"]:
parents = asset_doc["data"].get("parents") or list()
anatomy_updates["hierarchy"] = "/".join(parents)
# Task
task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name
# Additional data
resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_updates["resolution_width"] = resolution_width
resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_updates["resolution_height"] = resolution_height
pixel_aspect = instance.data.get("pixelAspect")
if pixel_aspect:
anatomy_updates["pixel_aspect"] = float(
"{:0.2f}".format(float(pixel_aspect))
)
fps = instance.data.get("fps")
if fps:
anatomy_updates["fps"] = float("{:0.2f}".format(float(fps)))
anatomy_data = copy.deepcopy(context.data["anatomyData"])
anatomy_data.update(anatomy_updates)
# Store anatomy data
instance.data["projectEntity"] = project_doc
instance.data["anatomyData"] = anatomy_data
instance.data["version"] = version_number
# Log collected data
instance_name = instance.data["name"]
instance_label = instance.data.get("label")
if instance_label:
instance_name += "({})".format(instance_label)
self.log.debug("Anatomy data for instance {}: {}".format(
instance_name,
json.dumps(anatomy_data, indent=4)
))
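
The last-version lookup used by _query_last_versions above, reduced to a plain pymongo aggregation for reference; the connection URI, database and collection names, and subset_ids are placeholders.

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")
versions_col = client["avalon"]["project_x"]
subset_ids = []  # list of subset ObjectIds gathered beforehand

pipeline = [
    # All version documents belonging to the queried subsets
    {"$match": {"type": "version", "parent": {"$in": subset_ids}}},
    # Ascending by version name so $last picks the highest one
    {"$sort": {"name": 1}},
    # One output document per subset with its last version
    {"$group": {
        "_id": "$parent",
        "_version_id": {"$last": "$_id"},
        "name": {"$last": "$name"},
    }},
]

last_version_by_subset_id = {
    doc["_id"]: doc["name"] for doc in versions_col.aggregate(pipeline)
}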

View file

@ -195,11 +195,14 @@ class ExtractBurnin(pype.api.Extractor):
if "delete" in new_repre["tags"]:
new_repre["tags"].remove("delete")
# Update name and outputName to be able have multiple outputs
# Join previous "outputName" with filename suffix
new_name = "_".join([new_repre["outputName"], filename_suffix])
new_repre["name"] = new_name
new_repre["outputName"] = new_name
if len(repre_burnin_defs.keys()) > 1:
# Update name and outputName to be able to have
# multiple outputs in case of multiple burnin presets
# Join previous "outputName" with filename suffix
new_name = "_".join(
[new_repre["outputName"], filename_suffix])
new_repre["name"] = new_name
new_repre["outputName"] = new_name
# Prepare paths and files for process.
self.input_output_paths(new_repre, temp_data, filename_suffix)

View file

@ -1,6 +1,6 @@
import pyblish.api
from avalon import io
from copy import deepcopy
class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
"""Create entities in Avalon based on collected data."""
@ -14,14 +14,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
if "hierarchyContext" not in context.data:
self.log.info("skipping IntegrateHierarchyToAvalon")
return
hierarchy_context = deepcopy(context.data["hierarchyContext"])
if not io.Session:
io.install()
active_assets = []
hierarchy_context = context.data["hierarchyContext"]
hierarchy_assets = self._get_assets(hierarchy_context)
# filter only the active publishing instances
for instance in context:
if instance.data.get("publish") is False:
@ -32,13 +30,13 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
active_assets.append(instance.data["asset"])
# filter out only assets which are activated as instances
new_hierarchy_assets = {k: v for k, v in hierarchy_assets.items()
if k in active_assets}
# remove duplicates from the list
self.active_assets = list(set(active_assets))
self.log.debug("__ self.active_assets: {}".format(self.active_assets))
# modify the hierarchy context so there are only filtered assets
self._set_assets(hierarchy_context, new_hierarchy_assets)
hierarchy_context = self._get_assets(hierarchy_context)
self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))
input_data = context.data["hierarchyContext"] = hierarchy_context
self.project = None
@ -178,35 +176,18 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
Usually the last part of a deep dictionary which
does not have any children
"""
input_dict_copy = deepcopy(input_dict)
for key in input_dict.keys():
self.log.debug("__ key: {}".format(key))
# check if child key is available
if input_dict[key].get("childs"):
# loop deeper
return self._get_assets(input_dict[key]["childs"])
input_dict_copy[key]["childs"] = self._get_assets(
input_dict[key]["childs"])
else:
# give the dictionary with assets
return input_dict
# filter out unwanted assets
if key not in self.active_assets:
input_dict_copy.pop(key, None)
def _set_assets(self, input_dict, new_assets=None):
""" Modify the hierarchy context dictionary.
It will replace the asset dictionary with only the filtred one.
"""
for key in input_dict.keys():
# check if child key is available
if input_dict[key].get("childs"):
# return if this is just for testing purpose and no
# new_assets property is avalable
if not new_assets:
return True
# test for deeper inner children availabelity
if self._set_assets(input_dict[key]["childs"]):
# if one level deeper is still children available
# then process farther
self._set_assets(input_dict[key]["childs"], new_assets)
else:
# or just assign the filtred asset ditionary
input_dict[key]["childs"] = new_assets
else:
# test didnt find more childs in input dictionary
return None
return input_dict_copy

View file

@ -633,6 +633,26 @@ class ExtractReview(pyblish.api.InstancePlugin):
input_width = int(input_data["width"])
input_height = int(input_data["height"])
# Make sure input width and height are not odd numbers
input_width_is_odd = bool(input_width % 2 != 0)
input_height_is_odd = bool(input_height % 2 != 0)
if input_width_is_odd or input_height_is_odd:
# Add padding to input and make sure this filter is at first place
filters.append("pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2")
# Change input width or height as first filter will change them
if input_width_is_odd:
self.log.info((
"Converting input width from odd to even number. {} -> {}"
).format(input_width, input_width + 1))
input_width += 1
if input_height_is_odd:
self.log.info((
"Converting input height from odd to even number. {} -> {}"
).format(input_height, input_height + 1))
input_height += 1
self.log.debug("pixel_aspect: `{}`".format(pixel_aspect))
self.log.debug("input_width: `{}`".format(input_width))
self.log.debug("input_height: `{}`".format(input_height))
@ -654,6 +674,22 @@ class ExtractReview(pyblish.api.InstancePlugin):
output_width = int(output_width)
output_height = int(output_height)
# Make sure output width and height are not odd numbers
# When this can happen:
# - the output definition sets width or height to an odd number
# - `instance.data` contains width or height with an odd number
if output_width % 2 != 0:
self.log.warning((
"Converting output width from odd to even number. {} -> {}"
).format(output_width, output_width + 1))
output_width += 1
if output_height % 2 != 0:
self.log.warning((
"Converting output height from odd to even number. {} -> {}"
).format(output_height, output_height + 1))
output_height += 1
self.log.debug(
"Output resolution is {}x{}".format(output_width, output_height)
)
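
To illustrate why the padding filter is injected first: yuv420p/H.264 outputs require even dimensions, so a 1279x719 input has to become 1280x720 before any other filtering. A standalone sketch of the same logic; the input size is hypothetical.

input_width, input_height = 1279, 719
filters = []

if input_width % 2 != 0 or input_height % 2 != 0:
    # Pad up to the nearest even size; this must be the first filter
    # because it changes the effective input resolution.
    filters.append("pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2")
    input_width += input_width % 2    # 1279 -> 1280
    input_height += input_height % 2  # 719 -> 720

# e.g. -vf "pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2"
vf_arg = '-vf "{}"'.format(",".join(filters)) if filters else ""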

View file

@ -0,0 +1,31 @@
import pyblish.api
import os
class ValidateIntent(pyblish.api.ContextPlugin):
"""Validate intent of the publish.
It is required to fill in the intent of this publish. Check the log
for more details.
"""
order = pyblish.api.ValidatorOrder
label = "Validate Intent"
# TODO: this should be off by default and only activated via config
tasks = ["animation"]
hosts = ["harmony"]
if os.environ.get("AVALON_TASK") not in tasks:
active = False
def process(self, context):
msg = (
"Please make sure that you select the intent of this publish."
)
intent = context.data.get("intent")
self.log.debug(intent)
assert intent, msg
intent_value = intent.get("value")
assert intent_value, msg

View file

@ -1,7 +1,7 @@
from maya import cmds, mel
import pymel.core as pc
from avalon import api
from avalon import api, io
from avalon.maya.pipeline import containerise
from avalon.maya import lib
@ -58,6 +58,13 @@ class AudioLoader(api.Loader):
type="string"
)
# Set frame range.
version = io.find_one({"_id": representation["parent"]})
subset = io.find_one({"_id": version["parent"]})
asset = io.find_one({"_id": subset["parent"]})
audio_node.sourceStart.set(1 - asset["data"]["frameStart"])
audio_node.sourceEnd.set(asset["data"]["frameEnd"])
def switch(self, container, representation):
self.update(container, representation)

View file

@ -1,7 +1,7 @@
import pymel.core as pc
import maya.cmds as cmds
from avalon import api
from avalon import api, io
from avalon.maya.pipeline import containerise
from avalon.maya import lib
from Qt import QtWidgets
@ -147,6 +147,17 @@ class ImagePlaneLoader(api.Loader):
type="string"
)
# Set frame range.
version = io.find_one({"_id": representation["parent"]})
subset = io.find_one({"_id": version["parent"]})
asset = io.find_one({"_id": subset["parent"]})
start_frame = asset["data"]["frameStart"]
end_frame = asset["data"]["frameEnd"]
image_plane_shape.frameOffset.set(1 - start_frame)
image_plane_shape.frameIn.set(start_frame)
image_plane_shape.frameOut.set(end_frame)
image_plane_shape.frameCache.set(end_frame)
def switch(self, container, representation):
self.update(container, representation)

View file

@ -101,7 +101,7 @@ class ExtractCameraMayaScene(pype.api.Extractor):
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
except KeyError:
# no preset found
pass

View file

@ -33,7 +33,7 @@ class ExtractMayaSceneRaw(pype.api.Extractor):
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
except KeyError:
# no preset found
pass
# Define extract output file path

View file

@ -41,7 +41,7 @@ class ExtractModel(pype.api.Extractor):
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
except KeyError:
# no preset found
pass
# Define extract output file path

View file

@ -111,7 +111,7 @@ class ExtractYetiRig(pype.api.Extractor):
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except AttributeError:
except KeyError:
# no preset found
pass
yeti_nodes = cmds.ls(instance, type="pgYetiMaya")

View file

@ -76,7 +76,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
# check if audio stream is in input video file
ffprob_cmd = (
"{ffprobe_path} -i {full_input_path} -show_streams "
"{ffprobe_path} -i \"{full_input_path}\" -show_streams "
"-select_streams a -loglevel error"
).format(**locals())
self.log.debug("ffprob_cmd: {}".format(ffprob_cmd))
@ -106,7 +106,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
# try to get video native resolution data
try:
resolution_output = pype.api.subprocess((
"{ffprobe_path} -i {full_input_path} -v error "
"{ffprobe_path} -i \"{full_input_path}\" -v error "
"-select_streams v:0 -show_entries "
"stream=width,height -of csv=s=x:p=0"
).format(**locals()))
@ -193,7 +193,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
# append ffmpeg input video clip
input_args.append("-ss {:0.2f}".format(start_sec))
input_args.append("-t {:0.2f}".format(duration_sec))
input_args.append("-i {}".format(full_input_path))
input_args.append("-i \"{}\"".format(full_input_path))
# add copy audio video codec if only shortening clip
if ("_cut-bigger" in tags) and (not empty_add):
@ -203,8 +203,7 @@ class ExtractReviewCutUpVideo(pype.api.Extractor):
output_args.append("-intra")
# output filename
output_args.append("-y")
output_args.append(full_output_path)
output_args.append("-y \"{}\"".format(full_output_path))
mov_args = [
ffmpeg_path,

View file

@ -17,13 +17,13 @@ class CollectClipInstances(pyblish.api.InstancePlugin):
subsets = {
"referenceMain": {
"family": "review",
"families": ["review", "ftrack"],
"families": ["clip", "ftrack"],
# "ftrackFamily": "review",
"extension": ".mp4"
},
"audioMain": {
"family": "audio",
"families": ["ftrack"],
"families": ["clip", "ftrack"],
# "ftrackFamily": "audio",
"extension": ".wav",
# "version": 1

View file

@ -0,0 +1,29 @@
"""
Requires:
Nothing
Provides:
Instance
"""
import pyblish.api
from pprint import pformat
class CollectInstanceData(pyblish.api.InstancePlugin):
"""
Collector with only one reason for its existence - remove 'ftrack'
family implicitly added by Standalone Publisher
"""
label = "Collect instance data"
order = pyblish.api.CollectorOrder + 0.49
families = ["render", "plate"]
hosts = ["standalonepublisher"]
def process(self, instance):
fps = instance.data["assetEntity"]["data"]["fps"]
instance.data.update({
"fps": fps
})
self.log.debug(f"instance.data: {pformat(instance.data)}")

View file

@ -10,7 +10,7 @@ class ExtractShotData(pype.api.Extractor):
label = "Extract Shot Data"
hosts = ["standalonepublisher"]
families = ["review", "audio"]
families = ["clip"]
# presets

View file

@ -64,6 +64,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
else:
# Convert to jpeg if not yet
full_input_path = os.path.join(thumbnail_repre["stagingDir"], file)
full_input_path = '"{}"'.format(full_input_path)
self.log.info("input {}".format(full_input_path))
full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1]

View file

@ -1,5 +1,3 @@
import os
import pyblish.api
import pype.api
@ -9,10 +7,14 @@ class ValidateEditorialResources(pyblish.api.InstancePlugin):
label = "Validate Editorial Resources"
hosts = ["standalonepublisher"]
families = ["audio", "review"]
families = ["clip"]
order = pype.api.ValidateContentsOrder
def process(self, instance):
self.log.debug(
f"Instance: {instance}, Families: "
f"{[instance.data['family']] + instance.data['families']}")
check_file = instance.data["editorialVideoPath"]
msg = f"Missing \"{check_file}\"."
assert check_file, msg

Binary file not shown (image, 45 KiB).

View file

@ -15,7 +15,7 @@ ffprobe_path = pype.lib.get_ffmpeg_tool_path("ffprobe")
FFMPEG = (
'{} -loglevel panic -i %(input)s %(filters)s %(args)s%(output)s'
'{} -loglevel panic -i "%(input)s" %(filters)s %(args)s%(output)s'
).format(ffmpeg_path)
FFPROBE = (

View file

@ -1,9 +1,11 @@
from .lib import (
system_settings,
project_settings
project_settings,
environments
)
__all__ = (
"system_settings",
"project_settings"
"project_settings",
"environments"
)

View file

@ -3,5 +3,6 @@
"PYTHONPATH": [
"{PYPE_SETUP_PATH}/repos/avalon-core/setup/blender",
"{PYTHONPATH}"
]
],
"CREATE_NEW_CONSOLE": "yes"
}

View file

@ -6,9 +6,9 @@
"PYPE_PROJECT_PLUGINS": "",
"STUDIO_SOFT": "{PYP_SETUP_ROOT}/soft",
"FFMPEG_PATH": {
"windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/windows/bin",
"darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/darwin/bin",
"linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/ffmpeg_exec/linux"
"windows": "{VIRTUAL_ENV}/localized/ffmpeg_exec/windows/bin;{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/windows/bin",
"darwin": "{VIRTUAL_ENV}/localized/ffmpeg_exec/darwin/bin:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/darwin/bin",
"linux": "{VIRTUAL_ENV}/localized/ffmpeg_exec/linux:{PYPE_SETUP_PATH}/vendor/bin/ffmpeg_exec/linux"
},
"DJV_PATH": {
"windows": [

View file

@ -0,0 +1,7 @@
{
"AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH": "1",
"PYTHONPATH": "{PYTHONPATH}",
"PYPE_LOG_NO_COLORS": "Yes",
"WEBSOCKET_URL": "ws://localhost:8099/ws/",
"WORKFILES_SAVE_AS": "Yes"
}

View file

@ -0,0 +1,8 @@
application_dir = "blender"
executable = "blender_2.80"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.80"
ftrack_label = "Blender"
icon = "app_icons/blender.png"
ftrack_icon = "{}/app_icons/blender.png"

View file

@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.81"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.81"
icon = "app_icons/blender.png"
ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

View file

@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.82"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.82"
icon = "app_icons/blender.png"
ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

View file

@ -0,0 +1,9 @@
application_dir = "blender"
executable = "blender_2.83"
schema = "avalon-core:application-1.0"
label = "Blender"
label_variant = "2.83"
icon = "app_icons/blender.png"
ftrack_label = "Blender"
ftrack_icon = '{}/app_icons/blender.png'

View file

@ -0,0 +1,9 @@
executable = "celaction_local"
schema = "avalon-core:application-1.0"
application_dir = "celaction"
label = "CelAction2D"
icon = "app_icons/celaction_local.png"
launch_hook = "pype/hooks/celaction/prelaunch.py/CelactionPrelaunchHook"
ftrack_label = "CelAction2D"
ftrack_icon = '{}/app_icons/celaction_local.png'

View file

@ -0,0 +1,8 @@
schema = "avalon-core:application-1.0"
application_dir = "shell"
executable = "celaction_publish"
label = "Celaction Shell"
icon = "app_icons/celaction.png"
[environment]
CREATE_NEW_CONSOLE = "Yes"

View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
open -a blender $@

View file

@ -0,0 +1,9 @@
#!/usr/bin/env bash
DIRNAME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
set >~/environment.tmp
if [ $? -ne 0 ] ; then
echo "ERROR: cannot write to '~/environment.tmp'!"
read -n 1 -s -r -p "Press any key to exit"
exit 1
fi
open -a Terminal.app "$DIRNAME/harmony_17_launch"

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
source ~/environment.tmp
export $(cut -d= -f1 ~/environment.tmp)
exe="/Applications/Toon Boom Harmony 17 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium"
$PYPE_PYTHON_EXE -c "import avalon.harmony;avalon.harmony.launch('$exe')"

View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
open /usr/bin/python3 --args $@

View file

@ -0,0 +1,9 @@
application_dir = "harmony"
label = "Harmony"
label_variant = "17"
ftrack_label = "Harmony"
schema = "avalon-core:application-1.0"
executable = "harmony_17"
description = ""
icon = "app_icons/harmony.png"
ftrack_icon = '{}/app_icons/harmony.png'

View file

@ -0,0 +1,8 @@
executable = "houdini_16"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "16"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'

View file

@ -0,0 +1,8 @@
executable = "houdini_17"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "17"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'

View file

@ -0,0 +1,8 @@
executable = "houdini_18"
schema = "avalon-core:application-1.0"
application_dir = "houdini"
label = "Houdini"
label_variant = "18"
ftrack_label = "Houdini"
icon = "app_icons/houdini.png"
ftrack_icon = '{}/app_icons/houdini.png'

View file

@ -0,0 +1,8 @@
#!/usr/bin/env bash
maya_path = "/usr/autodesk/maya2016/bin/maya"
if [[ -z $PYPE_LOG_NO_COLORS ]]; then
$maya_path -file "$AVALON_LAST_WORKFILE" $@
else
$maya_path $@

View file

@ -0,0 +1,8 @@
#!/usr/bin/env bash
maya_path = "/usr/autodesk/maya2017/bin/maya"
if [[ -z $AVALON_LAST_WORKFILE ]]; then
$maya_path -file "$AVALON_LAST_WORKFILE" $@
else
$maya_path $@

View file

@ -0,0 +1,8 @@
#!/usr/bin/env bash
maya_path = "/usr/autodesk/maya2018/bin/maya"
if [[ -z $AVALON_LAST_WORKFILE ]]; then
$maya_path -file "$AVALON_LAST_WORKFILE" $@
else
$maya_path $@

View file

@ -0,0 +1,8 @@
#!/usr/bin/env bash
maya_path = "/usr/autodesk/maya2019/bin/maya"
if [[ -z $AVALON_LAST_WORKFILE ]]; then
$maya_path -file "$AVALON_LAST_WORKFILE" $@
else
$maya_path $@

View file

@ -0,0 +1,8 @@
#!/usr/bin/env bash
maya_path = "/usr/autodesk/maya2020/bin/maya"
if [[ -z $AVALON_LAST_WORKFILE ]]; then
$maya_path -file "$AVALON_LAST_WORKFILE" $@
else
$maya_path $@

View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke11.3v5/Nuke11.3'

View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
gnome-terminal -e '/usr/local/Nuke12.0v1/Nuke12.0'

Some files were not shown because too many files have changed in this diff.